package com.ximple.eofms.jobs; import java.io.File; import java.io.FileFilter; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FilenameFilter; import java.io.IOException; import java.math.BigDecimal; import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.FileChannel; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Types; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Map; import java.util.TreeMap; import com.ximple.eofms.jobs.context.AbstractOracleJobContext; import com.ximple.eofms.jobs.context.edbgeo.FeatureDgnConvertEdbGeoJobContext; import com.ximple.eofms.jobs.context.edbgeo.GeneralDgnConvertEdbGeoJobContext; import com.ximple.eofms.jobs.context.edbgeo.IndexDgnConvertEdbGeoJobContext; import com.ximple.eofms.jobs.context.edbgeo.OracleConvertEdbGeoJobContext; import com.ximple.eofms.util.BinConverter; import com.ximple.eofms.util.ByteArrayCompressor; import com.ximple.eofms.util.FileUtils; import com.ximple.eofms.util.StringUtils; import com.ximple.io.dgn7.ComplexElement; import com.ximple.io.dgn7.Dgn7fileException; import com.ximple.io.dgn7.Dgn7fileReader; import com.ximple.io.dgn7.Element; import com.ximple.io.dgn7.ElementType; import com.ximple.io.dgn7.IElementHandler; import com.ximple.io.dgn7.Lock; import com.ximple.util.PrintfFormat; import oracle.jdbc.OracleConnection; import oracle.jdbc.OracleResultSet; import oracle.sql.ARRAY; import oracle.sql.BLOB; import org.apache.commons.collections.OrderedMap; import org.apache.commons.collections.OrderedMapIterator; import org.apache.commons.collections.map.LinkedMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.geotools.data.DataStore; import org.geotools.data.Transaction; import org.geotools.data.edbgeo.PostgisDataStoreFactory; import 
org.geotools.data.jdbc.JDBCUtils; import org.geotools.feature.SchemaException; import org.geotools.jdbc.JDBCDataStore; import org.opengis.feature.IllegalAttributeException; import org.quartz.JobDataMap; import org.quartz.JobDetail; import org.quartz.JobExecutionContext; import org.quartz.JobExecutionException; public class OracleConvertDgn2EdbGeoJob extends AbstractOracleDatabaseJob { final static Log logger = LogFactory.getLog(OracleConvertDgn2EdbGeoJob.class); private static final String EDBHOST = "EDBHOST"; private static final String EDBDATBASE = "EDBDATBASE"; private static final String EDBPORT = "EDBPORT"; private static final String EDBSCHEMA = "EDBSCHEMA"; private static final String EDBUSER = "EDBUSER"; private static final String EDBPASS = "EDBPASS"; private static final String USEWKB = "USEWKB"; private static final boolean useTpclidText = false; private static final int FETCHSIZE = 30; private static final int COMMITSIZE = 100; private static final String INDEXPATHNAME = "index"; private static final String OTHERPATHNAME = "other"; protected static class Pair { Object first; Object second; public Pair(Object first, Object second) { this.first = first; this.second = second; } } protected static PostgisDataStoreFactory dataStoreFactory = new PostgisDataStoreFactory(); protected String _edbHost; protected String _edbDatabase; protected String _edbPort; protected String _edbSchema; protected String _edbUsername; protected String _edbPassword; protected String _edbUseWKB; protected Map edbProperties; protected JDBCDataStore targetDataStore; // protected OracleConvertEdbGeoJobContext oracleJobContext; private long queryTime = 0; private long queryTimeStart = 0; public Log getLogger() { return logger; } protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath, boolean profileMode, boolean useTransform) { return new OracleConvertEdbGeoJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, filterPath, 
profileMode, useTransform); } protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException { super.extractJobConfiguration(jobDetail); JobDataMap dataMap = jobDetail.getJobDataMap(); _edbHost = dataMap.getString(EDBHOST); _edbDatabase = dataMap.getString(EDBDATBASE); _edbPort = dataMap.getString(EDBPORT); _edbSchema = dataMap.getString(EDBSCHEMA); _edbUsername = dataMap.getString(EDBUSER); _edbPassword = dataMap.getString(EDBPASS); _edbUseWKB = dataMap.getString(USEWKB); Log logger = getLogger(); /* logger.info("EDBHOST=" + _myHost); logger.info("EDBDATBASE=" + _myDatabase); logger.info("EDBPORT=" + _myPort); logger.info("EDBSCHEMA=" + _mySchema); logger.info("EDBUSER=" + _myUsername); logger.info("EDBPASS=" + _myPassword); logger.info("USEWKB=" + _myUseWKB); */ if (_edbHost == null) { logger.warn("EDBHOST is null"); throw new JobExecutionException("Unknown EdbGeoSpatial host."); } if (_edbDatabase == null) { logger.warn("PGDATABASE is null"); throw new JobExecutionException("Unknown EdbGeoSpatial database."); } if (_edbPort == null) { logger.warn("EDBPORT is null"); throw new JobExecutionException("Unknown EdbGeoSpatial port."); } if (_edbSchema == null) { logger.warn("EDBSCHEMA is null"); throw new JobExecutionException("Unknown EdbGeoSpatial schema."); } if (_edbUsername == null) { logger.warn("PGUSERNAME is null"); throw new JobExecutionException("Unknown EdbGeoSpatial username."); } if (_edbPassword == null) { logger.warn("PGPASSWORD is null"); throw new JobExecutionException("Unknown EdbGeoSpatial password."); } Map remote = new TreeMap(); remote.put("dbtype", "edbgeo"); remote.put("charset", "UTF-8"); remote.put("host", _edbHost); remote.put("port", _edbPort); remote.put("database", _edbDatabase); remote.put("user", _edbUsername); remote.put("passwd", _edbPassword); remote.put("namespace", null); edbProperties = remote; } public void execute(JobExecutionContext context) throws JobExecutionException { // Every job has its own job 
detail JobDetail jobDetail = context.getJobDetail(); // The name is defined in the job definition String jobName = jobDetail.getKey().getName(); // Log the time the job started logger.info(jobName + " fired at " + new Date()); extractJobConfiguration(jobDetail); createSourceDataStore(); createTargetDataStore(); if (getSourceDataStore() == null) { logger.warn("Cannot connect source oracle database."); throw new JobExecutionException("Cannot connect source oracle database."); } if (getTargetDataStore() == null) { logger.warn("Cannot connect source postgreSQL database."); throw new JobExecutionException("Cannot connect source postgreSQL database."); } if (isProfileMode()) { queryTime = 0; } long t1 = System.currentTimeMillis(); String targetSchemaName; try { logger.info("-- step:clearOutputDatabase --"); clearOutputDatabase(); targetSchemaName = determineTargetSchemaName(); if (checkConvertFile()) { logger.info("-- step:convertIndexDesignFile --"); long tStep = System.currentTimeMillis(); convertIndexDesignFile(context, targetSchemaName); if (isProfileMode()) { long tStepEnd = System.currentTimeMillis(); logTimeDiff("Profile-convertIndexDesignFile", tStep, tStepEnd); } logger.info("-- step:convertOtherDesignFile --"); tStep = System.currentTimeMillis(); convertOtherDesignFile(context, targetSchemaName); if (isProfileMode()) { long tStepEnd = System.currentTimeMillis(); logTimeDiff("Profile-convertOtherDesignFile", tStep, tStepEnd); } } if (checkConvertDB()) { logger.info("-- step:convertOracleDB --"); OracleConvertEdbGeoJobContext jobContext = (OracleConvertEdbGeoJobContext) prepareJobContext(targetSchemaName, _filterPath, isProfileMode(), isTransformed()); jobContext.setSourceDataStore(getSourceDataStore()); // jobContext.setConvertElementIn(_convertElementIn); jobContext.setElementLogging(checkElementLogging()); jobContext.setExecutionContext(context); createHibernateSequence(jobContext); long tStep = System.currentTimeMillis(); if (isCopyConnectivityMode()) { 
copyConnectivity(jobContext); } if (isProfileMode()) { long tStepEnd = System.currentTimeMillis(); logTimeDiff("Profile-Copy Connectivity", tStep, tStepEnd); } for (String orgSchema : _orgSchema) { logger.info("----- start schema:" + orgSchema + " -----"); if (isProfileMode()) { jobContext.resetProcessTime(); jobContext.resetUpdateTime(); } tStep = System.currentTimeMillis(); exetcuteConvert(jobContext, orgSchema, _dataPath); //close all open filewriter instance jobContext.closeFeatureWriter(); if (isProfileMode()) { logger.warn("Profile-Current Query Oracle Cost-" + ((int) ((getQueryTime()) / 60000.0)) + " min - " + (((int) ((getQueryTime()) % 60000.0)) / 1000) + " sec"); long tStepEnd = System.currentTimeMillis(); logger.warn("Profile-Current Process Cost-" + ((int) ((getProcessTime()) / 60000.0)) + " min - " + (((int) ((getProcessTime()) % 60000.0)) / 1000) + " sec"); logger.warn("Profile-Current Update Cost-" + ((int) ((getUpdateTime()) / 60000.0)) + " min - " + (((int) ((getUpdateTime()) % 60000.0)) / 1000) + " sec"); logger.warn("Profile-Current JobContext Process Cost-" + ((int) ((jobContext.getProcessTime()) / 60000.0)) + " min - " + (((int) ((jobContext.getProcessTime()) % 60000.0)) / 1000) + " sec"); logger.warn("Profile-Current JobContext Update Cost-" + ((int) ((jobContext.getUpdateTime()) / 60000.0)) + " min - " + (((int) ((jobContext.getUpdateTime()) % 60000.0)) / 1000) + " sec"); logTimeDiff("Profile-Convert[" + orgSchema + "]", tStep, tStepEnd); resetQueryTime(); resetProcessTime(); resetUpdateTime(); } } jobContext.closeOracleConnection(); } if (checkConvertElementIn()) { logger.info("-- step:convertFeatureDesignFile --"); long tStep = System.currentTimeMillis(); convertFeatureDesignFile(context, targetSchemaName); if (isProfileMode()) { long tStepEnd = System.currentTimeMillis(); logTimeDiff("Profile-convertFeatureDesignFile", tStep, tStepEnd); } } if (checkCreateDummy()) { logger.info("-- step:createDummyFeatureFile --"); 
createDummyFeatureFile(context); } long t2 = System.currentTimeMillis(); // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss"; // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW); logTimeDiff("Total ", t1, t2); updateRepoStatusToReady(targetSchemaName); } catch (SQLException e) { disconnect(); logger.warn(e.getMessage(), e); throw new JobExecutionException("Database error. " + e.getMessage(), e); } catch (IOException ex) { disconnect(); logger.warn(ex.getMessage(), ex); throw new JobExecutionException("IO error. " + ex.getMessage(), ex); } finally { disconnect(); } logger.warn(jobName + " end at " + new Date()); } private void logTimeDiff(String message, long tBefore, long tCurrent) { logger.warn(message + ":use time = " + ((int) ((tCurrent - tBefore) / 60000.0)) + " min - " + (((int) ((tCurrent - tBefore) % 60000.0)) / 1000) + " sec"); } /** * Connectivity�ƻs�@�Ӫ����A�b�d�߹q�y��V�ɥΨӤ��OMS��Ʈw���q���s����(Connectivity) * * @param jobContext job context * @throws SQLException sql exception */ private void copyConnectivity(OracleConvertEdbGeoJobContext jobContext) throws SQLException { Connection connection = jobContext.getOracleConnection(); ResultSet rsMeta = connection.getMetaData().getTables(null, "BASEDB", AbstractOracleJobContext.CONNECTIVITY_WEBCHECK_NAME + "%", new String[]{"TABLE"}); boolean found = false; try { while (rsMeta.next()) { String tablename = rsMeta.getString(3); if (AbstractOracleJobContext.CONNECTIVITY_WEBCHECK_NAME.equalsIgnoreCase(tablename)) { found = true; break; } } // } catch (SQLException e) } finally { if (rsMeta != null) { rsMeta.close(); rsMeta = null; } } Statement stmt = connection.createStatement(); if (found) { stmt.execute(AbstractOracleJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK); } else { logger.info("Create CONNECTIVITY_WEBCHECK table."); stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK); stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_1); 
stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_2); stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_3); stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_4); stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_5); stmt.execute(AbstractOracleJobContext.CREATE_CONNECTIVITY_WEBCHECK_INDEX_6); stmt.execute(AbstractOracleJobContext.ALTER_CONNECTIVITY_WEBCHECK_1); stmt.execute(AbstractOracleJobContext.ALTER_CONNECTIVITY_WEBCHECK_2); } stmt.execute(AbstractOracleJobContext.COPY_CONNECTIVITY_TO_WEBCHECK); stmt.close(); } private void createHibernateSequence(OracleConvertEdbGeoJobContext jobContext) throws SQLException { Connection connection = jobContext.getOracleConnection(); try { Statement stmt = connection.createStatement(); stmt.execute(AbstractOracleJobContext.CREATE_HIBERNATE_SEQUENCE); stmt.close(); } catch (SQLException e) { logger.warn("HIBERNATE_SEQUENCE is already exist."); } } private void exetcuteConvert(OracleConvertEdbGeoJobContext jobContext, String querySchema, String targetSchemaName) throws SQLException { int order = 0; OrderedMap map = getBlobStorageList(jobContext.getOracleConnection(), querySchema, "SD$SPACENODES", null); logger.info("begin convert job:[" + map.size() + "]:testmode=" + _testMode); int total = map.size(); //spacenodes count int step = total / 100; int current = 0; if (total == 0) { logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is zero."); return; } logger.warn("SELECT COUNT FROM " + querySchema + ".SD$SPACENODES is " + map.size() ); //jobContext.startTransaction(); jobContext.setCurrentSchema(querySchema); jobContext.getExecutionContext().put("ConvertDgn2EdbGeoJobProgress", 0); for (OrderedMapIterator it = map.orderedMapIterator(); it.hasNext();) { it.next(); Pair pair = (Pair) it.getValue(); String tableSrc = (String) pair.first; logger.info("begin convert:[" + order + "]-" + tableSrc); queryIgsetElement(jobContext, 
querySchema, tableSrc); order++; if (_testMode) { if ((_testCount < 0) || (order >= _testCount)) break; } if ((order % COMMITSIZE) == 0) { // OracleConnection connection = jobContext.getOracleConnection(); // connection.commitTransaction(); jobContext.commitTransaction(); //jobContext.startTransaction(); System.gc(); System.runFinalization(); } if( step != 0) { int now = order % step; if (now != current) { current = now; jobContext.getExecutionContext().put("ConvertDgn2EdbGeoSpatialJob", current); } }else { jobContext.getExecutionContext().put("ConvertDgn2EdbGeoSpatialJob", current); current++; } } jobContext.getExecutionContext().put("ConvertDgn2EdbGeoSpatialJob", 100); jobContext.commitTransaction(); jobContext.resetFeatureContext(); if (isProfileMode()) { } logger.info("end convert job:[" + order + "]"); System.gc(); System.runFinalization(); } protected OrderedMap getBlobStorageList(Connection connection, String schemaSrc, String tableSrc, OrderedMap orderedMap) throws SQLException { if (orderedMap == null) orderedMap = new LinkedMap(99); String fetchStmtFmt = "SELECT SNID, SPACETABLE FROM \"%s\".\"%s\""; PrintfFormat spf = new PrintfFormat(fetchStmtFmt); String fetchStmt = spf.sprintf(new Object[]{schemaSrc, tableSrc}); Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); ResultSet rs = null; stmt.setFetchSize(FETCHSIZE); try { rs = stmt.executeQuery(fetchStmt); int size = rs.getMetaData().getColumnCount(); while (rs.next()) { Object[] values = new Object[size]; for (int i = 0; i < size; i++) { values[i] = rs.getObject(i + 1); } Integer key = ((BigDecimal) values[0]).intValue(); String name = (String) values[1]; Pair pair = (Pair) orderedMap.get(key); if (pair == null) orderedMap.put(key, new Pair(name, null)); else pair.first = name; } } catch (SQLException e) { logger.error(e.toString(), e); logger.error("stmt=" + fetchStmt); throw e; } finally { JDBCUtils.close(rs); JDBCUtils.close(stmt); } return orderedMap; 
} protected OrderedMap getRawFormatStorageList(OracleConnection connection, String schemaSrc, String tableSrc, OrderedMap orderedMap) throws SQLException { if (orderedMap == null) orderedMap = new LinkedMap(99); String fetchStmtFmt = "SELECT RNID, SPACETABLE FROM \"%s\".\"%s\""; PrintfFormat spf = new PrintfFormat(fetchStmtFmt); String fetchStmt = spf.sprintf(new Object[]{schemaSrc, tableSrc}); Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); stmt.setFetchSize(FETCHSIZE); ResultSet rs = stmt.executeQuery(fetchStmt); try { int size = rs.getMetaData().getColumnCount(); while (rs.next()) { Object[] values = new Object[size]; for (int i = 0; i < size; i++) { values[i] = rs.getObject(i + 1); } Integer key = ((BigDecimal) values[0]).intValue(); String name = (String) values[1]; Pair pair = (Pair) orderedMap.get(key); if (pair == null) orderedMap.put(key, new Pair(null, name)); else pair.second = name; } } finally { JDBCUtils.close(rs); JDBCUtils.close(stmt); } return orderedMap; } protected void queryIgsetElement(OracleConvertEdbGeoJobContext jobContext, String srcschema, String srctable) throws SQLException { Connection connection = jobContext.getOracleConnection(); String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" ORDER BY ROWID"; //String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" WHERE TAG_SFSC = 423 AND TAG_LUFID = 21612065 ORDER BY ROWID"; PrintfFormat spf = new PrintfFormat(fetchSrcStmtFmt); String fetchSrcStmt = spf.sprintf(new Object[]{srcschema, srctable}); Statement stmtSrc = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); stmtSrc.setFetchSize(FETCHSIZE); ResultSet rsSrc = stmtSrc.executeQuery(fetchSrcStmt); int igdsMetaType = rsSrc.getMetaData().getColumnType(1); while (rsSrc.next()) { if (isProfileMode()) { markQueryTime(); } byte[] raw = null; if (igdsMetaType == Types.BLOB) { BLOB blob = (BLOB) rsSrc.getBlob(1); try { raw = getBytesFromBLOB(blob); 
} catch (BufferOverflowException e) { logger.warn("Wrong Element Structure-", e); } finally { // blob.close(); } } else { raw = rsSrc.getBytes(1); } try { if (raw != null) { Element element = fetchBinaryElement(raw); if (isProfileMode()) { accumulateQueryTime(); } jobContext.putFeatureCollection(element); } else { if (isProfileMode()) { accumulateQueryTime(); } } } catch (Dgn7fileException e) { logger.warn("Dgn7Exception", e); } } JDBCUtils.close(rsSrc); JDBCUtils.close(stmtSrc); } protected void queryRawElement(OracleConvertEdbGeoJobContext jobContext, String srcschema, String srctable) throws SQLException { Connection connection = jobContext.getOracleConnection(); String fetchDestStmtFmt = "SELECT ELEMENT FROM \"%s\".\"%s\" ORDER BY ROWID"; PrintfFormat spf = new PrintfFormat(fetchDestStmtFmt); String fetchDestStmt = spf.sprintf(new Object[]{srcschema, srctable}); Statement stmtDest = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); stmtDest.setFetchSize(FETCHSIZE); ResultSet rsDest = stmtDest.executeQuery(fetchDestStmt); try { while (rsDest.next()) { ARRAY rawsValue = ((OracleResultSet) rsDest).getARRAY(1); long[] rawData = rawsValue.getLongArray(); byte[] comparessedValue; /* if (dataMode == TransferTask.DataMode.Normal) { comparessedValue = BinConverter.unmarshalByteArray(rawData, true); } else { comparessedValue = BinConverter.unmarshalCompactByteArray(rawData); } */ comparessedValue = BinConverter.unmarshalByteArray(rawData, true); byte[] rawDest = ByteArrayCompressor.decompressByteArray(comparessedValue); try { Element element = fetchBinaryElement(rawDest); jobContext.putFeatureCollection(element); } catch (Dgn7fileException e) { logger.warn("Dgn7Exception:" + e.getMessage(), e); } } } finally { JDBCUtils.close(rsDest); JDBCUtils.close(stmtDest); } } // Binary to Element private Element fetchBinaryElement(byte[] raws) throws Dgn7fileException { ByteBuffer buffer = ByteBuffer.wrap(raws); 
buffer.order(ByteOrder.LITTLE_ENDIAN); short signature = buffer.getShort(); // byte type = (byte) (buffer.get() & 0x7f); byte type = (byte) ((signature >>> 8) & 0x007f); // silly Bentley say contentLength is in 2-byte words // and ByteByffer uses raws. // track the record location int elementLength = (buffer.getShort() * 2) + 4; ElementType recordType = ElementType.forID(type); IElementHandler handler; handler = recordType.getElementHandler(); Element dgnElement = (Element) handler.read(buffer, signature, elementLength); if (recordType.isComplexElement() && (elementLength < raws.length)) { int offset = elementLength; while (offset < (raws.length - 4)) { buffer.position(offset); signature = buffer.getShort(); type = (byte) ((signature >>> 8) & 0x007f); elementLength = (buffer.getShort() * 2) + 4; if (raws.length < (offset + elementLength)) { logger.debug("Length not match:" + offset + ":" + buffer.position() + ":" + buffer.limit()); break; } recordType = ElementType.forID(type); handler = recordType.getElementHandler(); if (handler != null) { Element subElement = (Element) handler.read(buffer, signature, elementLength); ((ComplexElement) dgnElement).add(subElement); offset += elementLength; } else { byte[] remain = new byte[buffer.remaining()]; System.arraycopy(raws, offset, remain, 0, buffer.remaining()); for (int i = 0; i < remain.length; i++) { if (remain[i] != 0) { logger.info("fetch element has some error. 
index=" + (offset + i) + ":value=" + remain[i]); } } break; } } } return dgnElement; } /** * �����ഫ���޹��ɪ��u�@ * * @param context �u�@�������� * @throws org.quartz.JobExecutionException * exception */ private void convertIndexDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException { File indexDir = new File(getDataPath(), INDEXPATHNAME); if (!indexDir.exists()) { logger.info("index dir=" + indexDir + " not exist."); return; } if (!indexDir.isDirectory()) { logger.info("index dir=" + indexDir + " is not a directory."); } List dgnFiles = FileUtils.recurseDir(indexDir, new FileFilter() { public boolean accept(File pathname) { return pathname.isDirectory() || pathname.getName().toLowerCase().endsWith("dgn"); } }); for (File dgnFile : dgnFiles) { if (dgnFile.isDirectory()) continue; IndexDgnConvertEdbGeoJobContext convertContext = new IndexDgnConvertEdbGeoJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, isProfileMode(), isTransformed()); logger.info("--- start index dgnfile-" + dgnFile.toString() + " ---"); FileInputStream fs = null; FileChannel fc = null; Dgn7fileReader reader = null; try { convertContext.clearOutputDatabase(); convertContext.setExecutionContext(context); String dgnPaths[] = StringUtils.splitToArray(dgnFile.toString(), File.separator); convertContext.setFilename(dgnPaths[dgnPaths.length - 1]); convertContext.startTransaction(); fs = new FileInputStream(dgnFile); fc = fs.getChannel(); reader = new Dgn7fileReader(fc, new Lock()); convertContext.setReader(reader); scanIndexDgnElement(convertContext); convertContext.commitTransaction(); convertContext.closeFeatureWriter(); System.gc(); System.runFinalization(); } catch (FileNotFoundException e) { convertContext.rollbackTransaction(); logger.warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } catch (Dgn7fileException e) { convertContext.rollbackTransaction(); logger.warn(e.getMessage(), e); throw new 
JobExecutionException(e.getMessage(), e); } catch (IOException e) { convertContext.rollbackTransaction(); logger.warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } catch (IllegalAttributeException e) { convertContext.rollbackTransaction(); logger.warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } catch (SchemaException e) { convertContext.rollbackTransaction(); logger.warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } finally { convertContext.closeFeatureWriter(); if (reader != null) { try { reader.close(); } catch (IOException e) { logger.warn(e.getMessage(), e); } } if (fs != null) { try { fs.close(); } catch (IOException e) { logger.warn(e.getMessage(), e); } } if (isProfileMode()) { logger.warn("Profile-Current convertContext Process Cost-" + ((int) ((convertContext.getProcessTime()) / 60000.0)) + " min - " + (((int) ((convertContext.getProcessTime()) % 60000.0)) / 1000) + " sec"); logger.warn("Profile-Current convertContext Update Cost-" + ((int) ((convertContext.getUpdateTime()) / 60000.0)) + " min - " + (((int) ((convertContext.getUpdateTime()) % 60000.0)) / 1000) + " sec"); } } } } protected void scanIndexDgnElement(IndexDgnConvertEdbGeoJobContext convertContext) throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException { Dgn7fileReader reader = convertContext.getReader(); int count = 0; Element lastComplex = null; while (reader.hasNext()) { if (isProfileMode()) markProcessTime(); Element.FileRecord record = reader.nextElement(); if (record.element() != null) { Element element = (Element) record.element(); ElementType type = element.getElementType(); if ((!type.isComplexElement()) && (!element.isComponentElement())) { if (lastComplex != null) { processIndexElement(lastComplex, convertContext); lastComplex = null; } processIndexElement(element, convertContext); } else if (element.isComponentElement()) { if (lastComplex != null) { ((ComplexElement) 
lastComplex).add(element); } } else if (type.isComplexElement()) { if (lastComplex != null) { processIndexElement(lastComplex, convertContext); } lastComplex = element; } } count++; } if (lastComplex != null) { processIndexElement(lastComplex, convertContext); } logger.debug("ElementRecord Count=" + count); } private void processIndexElement(Element element, IndexDgnConvertEdbGeoJobContext convertContext) throws IllegalAttributeException, SchemaException { //if (useTpclidText) { // if (element instanceof TextElement) { // convertContext.putFeatureCollection(element); // } //} else { // if (element instanceof ShapeElement) { convertContext.putFeatureCollection(element); // } //} } /** * �����ഫ��L�]�p���ɪ��u�@ * * @param context jobContext * @throws org.quartz.JobExecutionException * exception */ private void convertOtherDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException { File otherDir = new File(getDataPath(), OTHERPATHNAME); if (!otherDir.exists()) { logger.info("other dir=" + otherDir + " not exist."); return; } if (!otherDir.isDirectory()) { logger.info("other dir=" + otherDir + " is not a directory."); } List dgnFiles = FileUtils.recurseDir(otherDir, new FileFilter() { public boolean accept(File pathname) { return pathname.isDirectory() || pathname.getName().toLowerCase().endsWith("dgn"); } }); for (File dgnFile : dgnFiles) { if (dgnFile.isDirectory()) continue; GeneralDgnConvertEdbGeoJobContext convertContext = new GeneralDgnConvertEdbGeoJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, isProfileMode(), isTransformed()); logger.info("--- start other dgnfile-" + dgnFile.toString() + " ---"); FileInputStream fs = null; FileChannel fc; Dgn7fileReader reader = null; try { convertContext.setExecutionContext(context); String dgnPaths[] = StringUtils.splitToArray(dgnFile.toString(), File.separator); convertContext.setFilename(dgnPaths[dgnPaths.length - 1]); convertContext.startTransaction(); fs = new 
FileInputStream(dgnFile); fc = fs.getChannel(); reader = new Dgn7fileReader(fc, new Lock()); convertContext.setReader(reader); scanOtherDgnElement(convertContext); convertContext.commitTransaction(); convertContext.closeFeatureWriter(); System.gc(); System.runFinalization(); } catch (FileNotFoundException e) { convertContext.rollbackTransaction(); logger.warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } catch (Dgn7fileException e) { convertContext.rollbackTransaction(); logger.warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } catch (IOException e) { convertContext.rollbackTransaction(); logger.warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } catch (IllegalAttributeException e) { convertContext.rollbackTransaction(); logger.warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } catch (SchemaException e) { convertContext.rollbackTransaction(); logger.warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } finally { convertContext.closeFeatureWriter(); if (reader != null) { try { reader.close(); } catch (IOException e) { logger.warn(e.getMessage(), e); } } if (fs != null) { try { fs.close(); } catch (IOException e) { logger.warn(e.getMessage(), e); } } if (isProfileMode()) { logger.warn("Profile-Current convertContext Process Cost-" + ((int) ((convertContext.getProcessTime()) / 60000.0)) + " min - " + (((int) ((convertContext.getProcessTime()) % 60000.0)) / 1000) + " sec"); logger.warn("Profile-Current convertContext Update Cost-" + ((int) ((convertContext.getUpdateTime()) / 60000.0)) + " min - " + (((int) ((convertContext.getUpdateTime()) % 60000.0)) / 1000) + " sec"); } } } } public void scanOtherDgnElement(GeneralDgnConvertEdbGeoJobContext convertContext) throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException { Dgn7fileReader reader = convertContext.getReader(); int count = 0; Element lastComplex = null; 
while (reader.hasNext()) {
            Element.FileRecord record = reader.nextElement();
            if (record.element() != null) {
                Element element = (Element) record.element();
                ElementType type = element.getElementType();
                if ((!type.isComplexElement()) && (!element.isComponentElement())) {
                    // Plain element: flush any pending complex first, then process it.
                    if (lastComplex != null) {
                        processOtherElement(lastComplex, convertContext);
                        lastComplex = null;
                    }
                    processOtherElement(element, convertContext);
                } else if (element.isComponentElement()) {
                    // Component elements are collected into the open complex element.
                    if (lastComplex != null) {
                        ((ComplexElement) lastComplex).add(element);
                    }
                } else if (type.isComplexElement()) {
                    // A new complex header closes the previous one.
                    if (lastComplex != null) {
                        processOtherElement(lastComplex, convertContext);
                    }
                    lastComplex = element;
                }
            }
            count++;
        }
        if (lastComplex != null) {
            processOtherElement(lastComplex, convertContext);
        }
        logger.debug("ElementRecord Count=" + count);
    }

    /**
     * Hands a single (possibly complex) DGN element to the conversion context.
     */
    private void processOtherElement(Element element, GeneralDgnConvertEdbGeoJobContext convertContext)
        throws IllegalAttributeException, SchemaException {
        convertContext.putFeatureCollection(element);
    }

    /**
     * Placeholder: clearing of previous output is currently disabled (body commented out).
     */
    private void clearOutputDatabase() {
        /*
        File outDataPath = new File(getDataPath(), OracleConvertEdbGeoJobContext.SHPOUTPATH);
        if (outDataPath.exists() && outDataPath.isDirectory()) {
            deleteFilesInPath(outDataPath);
        }
        outDataPath = new File(getDataPath(), IndexDgnConvertShpJobContext.SHPOUTPATH);
        if (outDataPath.exists() && outDataPath.isDirectory()) {
            deleteFilesInPath(outDataPath);
        }
        outDataPath = new File(getDataPath(), GeneralDgnConvertShpJobContext.SHPOUTPATH);
        if (outDataPath.exists() && outDataPath.isDirectory()) {
            deleteFilesInPath(outDataPath);
        }
        */
    }

    /**
     * Deletes all files under the given directory, removing subdirectories as well.
     */
    private void deleteFilesInPath(File outDataPath) {
        deleteFilesInPath(outDataPath, true);
    }

    /**
     * Recursively deletes the files under outDataPath; when removeSubDir is true the
     * emptied subdirectories are deleted too. Failures are logged, not thrown.
     *
     * @param outDataPath  directory to clean (no-op when not a directory)
     * @param removeSubDir whether to delete subdirectories after emptying them
     */
    private void deleteFilesInPath(File outDataPath, boolean removeSubDir) {
        if (!outDataPath.isDirectory()) {
            return;
        }
        File[] files = outDataPath.listFiles();
        for (File file : files) {
            if (file.isFile()) {
                if (!file.delete()) {
                    logger.info("Cannot delete file-" + file.toString());
                }
            } else if (file.isDirectory()) {
                deleteFilesInPath(file, removeSubDir);
                if (removeSubDir) {
                    // BUGFIX: the original logged "Cannot delete dir-" when delete()
                    // SUCCEEDED (missing negation) and stayed silent on failure.
                    if (!file.delete()) {
                        logger.info("Cannot delete dir-" + file.toString());
                    }
                }
            }
        }
    }

    /**
     * Converts every *.dgn file in the "elmin" directory into features of the target
     * schema, one transaction per file.
     *
     * @param context          Quartz execution context
     * @param targetSchemaName schema the features are written into
     * @throws JobExecutionException wrapping any file/DGN/schema error
     */
    private void convertFeatureDesignFile(JobExecutionContext context, String targetSchemaName)
        throws JobExecutionException {
        File elminDir = new File(getDataPath(), "elmin");
        if (!elminDir.exists()) {
            logger.info("elmin dir=" + elminDir + " not exist.");
            return;
        }
        if (!elminDir.isDirectory()) {
            logger.info("elmin dir=" + elminDir + " is not a directory.");
        }
        File[] dgnFiles = elminDir.listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.toLowerCase().endsWith(".dgn");
            }
        });
        for (File dgnFile : dgnFiles) {
            FeatureDgnConvertEdbGeoJobContext convertContext =
                new FeatureDgnConvertEdbGeoJobContext(getDataPath(), getTargetDataStore(),
                    targetSchemaName, _filterPath, isProfileMode(), isTransformed());
            logger.info("--- start dgnfile-" + dgnFile.toString() + " ---");
            try {
                convertContext.setExecutionContext(context);
                String dgnPaths[] = StringUtils.splitToArray(dgnFile.toString(), File.separator);
                convertContext.setFilename(dgnPaths[dgnPaths.length - 1]);
                convertContext.startTransaction();
                FileInputStream fs = new FileInputStream(dgnFile);
                FileChannel fc = fs.getChannel();
                Dgn7fileReader reader = new Dgn7fileReader(fc, new Lock());
                convertContext.setReader(reader);
                scanFeatureDgnElement(convertContext);
                convertContext.commitTransaction();
                convertContext.closeFeatureWriter();
                System.gc();
                System.runFinalization();
            } catch (FileNotFoundException e) {
                convertContext.rollbackTransaction();
                logger.warn(e.getMessage(), e);
                throw new JobExecutionException(e.getMessage(), e);
            } catch (Dgn7fileException e) {
                convertContext.rollbackTransaction();
                logger.warn(e.getMessage(), e);
                throw new JobExecutionException(e.getMessage(), e);
            } catch (IOException e) {
                convertContext.rollbackTransaction();
                logger.warn(e.getMessage(), e);
                throw new JobExecutionException(e.getMessage(), e);
            } catch (IllegalAttributeException
e) { convertContext.rollbackTransaction(); logger.warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } catch (SchemaException e) { convertContext.rollbackTransaction(); logger.warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } finally { convertContext.closeFeatureWriter(); } } } public void scanFeatureDgnElement(FeatureDgnConvertEdbGeoJobContext convertContext) throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException { Dgn7fileReader reader = convertContext.getReader(); int count = 0; Element lastComplex = null; while (reader.hasNext()) { Element.FileRecord record = reader.nextElement(); if (record.element() != null) { Element element = (Element) record.element(); ElementType type = element.getElementType(); if ((!type.isComplexElement()) && (!element.isComponentElement())) { if (lastComplex != null) { processFeatureElement(lastComplex, convertContext); lastComplex = null; } processFeatureElement(element, convertContext); } else if (element.isComponentElement()) { if (lastComplex != null) { ((ComplexElement) lastComplex).add(element); } } else if (type.isComplexElement()) { if (lastComplex != null) { processFeatureElement(lastComplex, convertContext); } lastComplex = element; } } count++; } if (lastComplex != null) { processFeatureElement(lastComplex, convertContext); } logger.debug("ElementRecord Count=" + count); } private void processFeatureElement(Element element, FeatureDgnConvertEdbGeoJobContext convertContext) throws IllegalAttributeException, SchemaException { convertContext.putFeatureCollection(element); } private void createDummyFeatureFile(JobExecutionContext context) throws JobExecutionException { /* DummyFeatureConvertShpJobContext convertContext = new DummyFeatureConvertShpJobContext(getDataPath(), _filterPath); try { convertContext.startTransaction(); convertContext.commitTransaction(); convertContext.closeFeatureWriter(); } catch (IOException e) { 
logger.warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } */ } public DataStore getTargetDataStore() { return targetDataStore; } protected void createTargetDataStore() throws JobExecutionException { if (targetDataStore != null) { targetDataStore.dispose(); targetDataStore = null; } /* if (!isDriverFound()) { throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER); } */ if (!edbProperties.containsKey(PostgisDataStoreFactory.MAXCONN.key)) { edbProperties.put(PostgisDataStoreFactory.MAXCONN.key, "5"); } if (!edbProperties.containsKey(PostgisDataStoreFactory.MINCONN.key)) { edbProperties.put(PostgisDataStoreFactory.MINCONN.key, "1"); } if (!edbProperties.containsKey(PostgisDataStoreFactory.WKBENABLED.key)) { edbProperties.put(PostgisDataStoreFactory.WKBENABLED.key, "true"); } if (!dataStoreFactory.canProcess(edbProperties)) { getLogger().warn("cannot process properties-"); throw new JobExecutionException("cannot process properties-"); } try { targetDataStore = (JDBCDataStore) dataStoreFactory.createDataStore(edbProperties); } catch (IOException e) { getLogger().warn(e.getMessage(), e); throw new JobExecutionException(e.getMessage(), e); } } protected void disconnect() { super.disconnect(); if (targetDataStore != null) { targetDataStore.dispose(); targetDataStore = null; } } private String determineTargetSchemaName() throws IOException { if (targetDataStore == null) return null; Connection connection = null; Statement stmt = null; ResultSet rs = null; String targetSchema = null; boolean needCreate = false; try { connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); rs = connection.getMetaData().getTables(null, _edbSchema, DataReposVersionManager.XGVERSIONTABLE_NAME, new String[]{"TABLE"}); if (!rs.next()) needCreate = true; rs.close(); rs = null; stmt = connection.createStatement(); stmt.execute("SET edb_redwood_date TO OFF"); stmt.execute("SET edb_redwood_strings TO OFF"); // stmt.execute("SET 
edb_stmt_level_tx TO OFF"); stmt.close(); if (needCreate) createXGeosVersionTable(connection, _edbSchema); StringBuilder sbSQL = new StringBuilder("SELECT "); sbSQL.append("vsschema, vsstatus FROM "); sbSQL.append(encodeSchemaTableName(_edbSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); sbSQL.append("ORDER BY vsid"); stmt = connection.createStatement(); rs = stmt.executeQuery(sbSQL.toString()); ArrayList tmpSchemas = new ArrayList(); int i = 0; int current = -1; while (rs.next()) { Object[] values = new Object[2]; values[0] = rs.getString("vsschema"); values[1] = rs.getShort("vsstatus"); tmpSchemas.add(values); if ((((Short) values[1]) & DataReposVersionManager.VSSTATUS_USING) != 0) { current = i; } i++; } if (current == -1) { Object[] values = tmpSchemas.get(0); targetSchema = (String) values[0]; } else if (current < (tmpSchemas.size() - 1)) { Object[] values = tmpSchemas.get(current + 1); targetSchema = (String) values[0]; } else { Object[] values = tmpSchemas.get(0); targetSchema = (String) values[0]; } sbSQL = new StringBuilder("UPDATE "); sbSQL.append(encodeSchemaTableName(_edbSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); sbSQL.append(" SET vsstatus = "); sbSQL.append(DataReposVersionManager.VSSTATUS_COVERT); sbSQL.append(" WHERE vsschema = '"); sbSQL.append(targetSchema).append("'"); int count = stmt.executeUpdate(sbSQL.toString()); if (count != 1) { logger.info("update status for " + targetSchema + " update result count=" + count); } } catch (SQLException e) { logger.warn(e.getMessage(), e); } finally { JDBCUtils.close(rs); JDBCUtils.close(stmt); JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); } return targetSchema; } public String encodeSchemaTableName(String schemaName, String tableName) { return "\"" + schemaName + "\".\"" + tableName + "\""; } private void createXGeosVersionTable(Connection connection, String pgSchema) throws SQLException { Statement stmt = null; StringBuilder sql = new 
StringBuilder("CREATE TABLE "); sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); sql.append(" ( vsid serial PRIMARY KEY, "); sql.append(" vsschema character varying(64) NOT NULL, "); sql.append(" vsstatus smallint NOT NULL, "); sql.append(" vstimestamp timestamp with time zone ) "); try { stmt = connection.createStatement(); stmt.executeUpdate(sql.toString()); sql = new StringBuilder("ALTER TABLE "); sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); sql.append(" OWNER TO ").append(_edbUsername); stmt.executeUpdate(sql.toString()); sql = new StringBuilder("GRANT ALL ON TABLE "); sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); sql.append(" TO public"); stmt.executeUpdate(sql.toString()); for (String schemaName : DataReposVersionManager.DEFAULTXGVERSIONSCHEMA_NAMES) { sql = new StringBuilder("INSERT INTO "); sql.append(encodeSchemaTableName(pgSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)); sql.append(" (vsschema, vsstatus) VALUES ('"); sql.append(schemaName).append("', "); sql.append(DataReposVersionManager.VSSTATUS_AVAILABLE).append(" )"); stmt.executeUpdate(sql.toString()); createIfNotExistNewSchema(connection, schemaName); } } finally { if (stmt != null) stmt.close(); } } private void updateRepoStatusToReady(String targetSchema) { if (targetDataStore == null) return; Connection connection = null; Statement stmt = null; ResultSet rs = null; boolean needCreate = false; try { StringBuilder sbSQL = new StringBuilder("UPDATE "); sbSQL.append(encodeSchemaTableName(_edbSchema, DataReposVersionManager.XGVERSIONTABLE_NAME)).append(' '); sbSQL.append(" SET vsstatus = "); sbSQL.append(DataReposVersionManager.VSSTATUS_READY); sbSQL.append(" , vstimestamp = CURRENT_TIMESTAMP WHERE vsschema = '"); sbSQL.append(targetSchema).append("'"); connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT); stmt = connection.createStatement(); int count 
= stmt.executeUpdate(sbSQL.toString()); if (count != 1) { logger.info("update status for " + targetSchema + " update result count=" + count); } } catch (SQLException e) { logger.warn(e.getMessage(), e); } catch (IOException e) { logger.warn(e.getMessage(), e); } finally { JDBCUtils.close(rs); JDBCUtils.close(stmt); JDBCUtils.close(connection, Transaction.AUTO_COMMIT, null); } } private void createIfNotExistNewSchema(Connection connection, String s) throws SQLException { Statement stmt = null; ResultSet rs = null; try { /* rs = connection.getMetaData().getSchemas(null, s); if (rs.next()) return; rs.close(); rs = null; */ StringBuilder sbSQL = new StringBuilder("CREATE SCHEMA "); sbSQL.append(s).append(' '); sbSQL.append("AUTHORIZATION ").append(_edbUsername); stmt = connection.createStatement(); stmt.executeUpdate(sbSQL.toString()); sbSQL = new StringBuilder("GRANT ALL ON SCHEMA "); sbSQL.append(s).append(' '); sbSQL.append("TO public"); stmt.executeUpdate(sbSQL.toString()); } catch (SQLException e) { logger.info("create schema:" + s + " has exception."); logger.info(e.getMessage(), e); } finally { if (rs != null) rs.close(); if (stmt != null) stmt.close(); } } public final void accumulateQueryTime() { queryTime += System.currentTimeMillis() - queryTimeStart; } public long getQueryTime() { return queryTime; } public final void markQueryTime() { queryTimeStart = System.currentTimeMillis(); } public final void resetQueryTime() { queryTime = 0; } }