| | |
| | | import java.sql.SQLException; |
| | | import java.sql.Statement; |
| | | import java.sql.Types; |
| | | import java.util.Calendar; |
| | | import java.util.Date; |
| | | import java.util.Map; |
| | | import java.util.TreeMap; |
| | | |
| | | import org.apache.commons.collections.OrderedMap; |
| | | import org.apache.commons.collections.OrderedMapIterator; |
| | | import org.apache.commons.collections.map.LinkedMap; |
| | | import org.apache.commons.logging.Log; |
| | | import org.apache.commons.logging.LogFactory; |
| | | import org.geotools.data.DataStore; |
| | | import org.geotools.data.oracle.OracleDataStore; |
| | | import org.geotools.data.oracle.OracleDataStoreFactory; |
| | | import org.geotools.feature.IllegalAttributeException; |
| | | import org.geotools.feature.SchemaException; |
| | | import org.quartz.JobDataMap; |
| | | import org.quartz.JobDetail; |
| | | import org.quartz.JobExecutionContext; |
| | | import org.quartz.JobExecutionException; |
| | |
| | | import oracle.sql.BLOB; |
| | | |
| | | import com.ximple.eofms.jobs.context.AbstractOracleJobContext; |
| | | import com.ximple.eofms.jobs.context.orasdo.FeatureDgnConvertOraSDOJobContext; |
| | | import com.ximple.eofms.jobs.context.orasdo.GeneralDgnConvertOraSDOJobContext; |
| | | import com.ximple.eofms.jobs.context.orasdo.IndexDgnConvertOraSDOJobContext; |
| | | import com.ximple.eofms.jobs.context.orasdo.OracleConvertOraSDOJobContext; |
| | | import com.ximple.eofms.util.BinConverter; |
| | | import com.ximple.eofms.util.ByteArrayCompressor; |
| | | import com.ximple.eofms.util.StringUtils; |
| | |
| | | { |
| | | final static Log logger = LogFactory.getLog(OracleConvertDgn2OraSDOJob.class); |
| | | |
| | | /** |
| | | * The Oracle driver class name |
| | | */ |
| | | private static final String JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver"; |
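| | | // Keys for reading the target OraSDO connection settings from the Quartz JobDataMap. |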
| | | private static final String SDOHOST = "SDOHOST"; |
| | | private static final String SDODDATBASE = "SDODDATBASE"; |
| | | private static final String SDOPORT = "SDOPORT"; |
| | | private static final String SDOSCHEMA = "SDOSCHEMA"; |
| | | private static final String SDOUSER = "SDOUSER"; |
| | | private static final String SDOPASS = "SDOPASS"; |
| | | private static final String USEWKB = "USEWKB"; |
| | | |
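| | | // Default JDBC fetch and batch sizes for the conversion statements. |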
| | | private static final int FETCHSIZE = 30; |
| | | private static final int BATCHSIZE = 25; |
| | |
| | | } |
| | | } |
| | | |
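| | | // Shared GeoTools factory used to create the target Oracle (SDO) data store. |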
| | | protected static OracleDataStoreFactory dataStoreFactory = new OracleDataStoreFactory(); |
| | | |
| | | GeometryFactory _geomFactory = new GeometryFactory(); |
| | | protected String _sdoHost; |
| | | protected String _sdoDatabase; |
| | | protected String _sdoPort; |
| | | protected String _sdoSchema; |
| | | protected String _sdoUsername; |
| | | protected String _sdoPassword; |
| | | protected String _sdoUseWKB; |
| | | |
| | | protected Map<String, String> sdoProperties; |
| | | protected OracleDataStore targetDataStore; |
| | | |
| | | public Log getLogger() |
| | | { |
| | | return logger; |
| | | } |
| | | |
| | | // OracleDataStoreFactory factory = new OracleDataStoreFactory(); |
| | | /* |
| | | Map map = new HashMap(); |
| | | map.put("host", fixture.getProperty("host")); |
| | | map.put("port", fixture.getProperty("port")); |
| | | map.put("instance", fixture.getProperty("instance")); |
| | | map.put("user", fixture.getProperty("user")); |
| | | map.put("passwd", fixture.getProperty("passwd")); |
| | | map.put("dbtype", "oracle"); |
| | | map.put("alias", fixture.getProperty("instance")); |
| | | map.put("namespace", null); |
| | | |
| | | assertTrue(factory.canProcess(map)); |
| | | |
| | | OracleDataStore store = (OracleDataStore) factory.createDataStore(map); |
| | | assertNull(store.getNameSpace()); |
| | | |
| | | map.put("schema", fixture.getProperty("user").toUpperCase()); |
| | | store = (OracleDataStore) factory.createDataStore(map); |
| | | assertNull(store.getNameSpace()); |
| | | |
| | | map.put("namespace", "topp"); |
| | | store = (OracleDataStore) factory.createDataStore(map); |
| | | assertEquals(new URI("topp"), store.getNameSpace()); |
| | | */ |
| | | |
| | | /* |
| | | FeatureWriter writer = dstore.getFeatureWriter("ORA_TEST_POINTS", Filter.INCLUDE, |
| | | Transaction.AUTO_COMMIT); |
| | | assertNotNull(writer); |
| | | |
| | | Feature feature = writer.next(); |
| | | System.out.println(feature); |
| | | feature.setAttribute(0, "Changed Feature"); |
| | | System.out.println(feature); |
| | | writer.write(); |
| | | writer.close(); |
| | | */ |
| | | |
| | | /* |
| | | Map fidGen = new HashMap(); |
| | | fidGen.put("ORA_TEST_POINTS", JDBCDataStoreConfig.FID_GEN_MANUAL_INC); |
| | | |
| | | JDBCDataStoreConfig config = JDBCDataStoreConfig.createWithSchemaNameAndFIDGenMap(schemaName, |
| | | fidGen); |
| | | |
| | | String name = "add_name"; |
| | | BigDecimal intval = new BigDecimal(70); |
| | | Point point = jtsFactory.createPoint(new Coordinate(-15.0, -25)); |
| | | Feature feature = dstore.getSchema("ORA_TEST_POINTS") |
| | | .create(new Object[] { name, intval, point }); |
| | | |
| | | FeatureStore fs = (FeatureStore) dstore.getFeatureSource("ORA_TEST_POINTS"); |
| | | fs.addFeatures(DataUtilities.collection(feature)); |
| | | */ |
| | | |
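| | | /** |
| | | * Builds the per-execution job context, binding the data path, the target |
| | | * OraSDO data store and the element filter file. |
| | | */ |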
| | | protected AbstractOracleJobContext prepareJobContext(String filterPath) |
| | | { |
| | | return new OracleConvertOraSDOJobContext(getDataPath(), getTargetDataStore(), filterPath); |
| | | } |
| | | |
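| | | /** |
| | | * Reads the OraSDO connection settings from the job detail and fails the job |
| | | * when any required value is missing. |
| | | */ |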
| | | protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException |
| | | { |
| | | super.extractJobConfiguration(jobDetail); |
| | | JobDataMap dataMap = jobDetail.getJobDataMap(); |
| | | _sdoHost = dataMap.getString(SDOHOST); |
| | | _sdoDatabase = dataMap.getString(SDODDATBASE); |
| | | _sdoPort = dataMap.getString(SDOPORT); |
| | | _sdoSchema = dataMap.getString(SDOSCHEMA); |
| | | _sdoUsername = dataMap.getString(SDOUSER); |
| | | _sdoPassword = dataMap.getString(SDOPASS); |
| | | _sdoUseWKB = dataMap.getString(USEWKB); |
| | | |
| | | Log logger = getLogger(); |
| | | /* |
| | | logger.info("SDOHOST=" + _myHost); |
| | | logger.info("SDODDATBASE=" + _myDatabase); |
| | | logger.info("SDOPORT=" + _myPort); |
| | | logger.info("SDOSCHEMA=" + _mySchema); |
| | | logger.info("SDOUSER=" + _myUsername); |
| | | logger.info("SDOPASS=" + _myPassword); |
| | | logger.info("USEWKB=" + _myUseWKB); |
| | | */ |
| | | |
| | | if (_sdoHost == null) |
| | | { |
| | | logger.warn("SDOHOST is null"); |
| | | throw new JobExecutionException("Unknown OraSDO host."); |
| | | } |
| | | if (_sdoDatabase == null) |
| | | { |
| | | logger.warn("PGDATABASE is null"); |
| | | throw new JobExecutionException("Unknown OraSDO database."); |
| | | } |
| | | if (_sdoPort == null) |
| | | { |
| | | logger.warn("SDOPORT is null"); |
| | | throw new JobExecutionException("Unknown OraSDO port."); |
| | | } |
| | | if (_sdoSchema == null) |
| | | { |
| | | logger.warn("SDOSCHEMA is null"); |
| | | throw new JobExecutionException("Unknown OraSDO schema."); |
| | | } |
| | | if (_sdoUsername == null) |
| | | { |
| | | logger.warn("PGUSERNAME is null"); |
| | | throw new JobExecutionException("Unknown OraSDO username."); |
| | | } |
| | | if (_sdoPassword == null) |
| | | { |
| | | logger.warn("PGPASSWORD is null"); |
| | | throw new JobExecutionException("Unknown OraSDO password."); |
| | | } |
| | | |
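| | | // Assemble the GeoTools connection parameters for the target Oracle Spatial store. |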
| | | Map<String, String> remote = new TreeMap<String, String>(); |
| | | remote.put("dbtype", "OraSDO"); |
| | | remote.put("charset", "UTF-8"); |
| | | remote.put("host", _sdoHost); |
| | | remote.put("port", _sdoPort); |
| | | remote.put("database", _sdoDatabase); |
| | | remote.put("user", _sdoUsername); |
| | | remote.put("passwd", _sdoPassword); |
| | | remote.put("namespace", null); |
| | | sdoProperties = remote; |
| | | } |
| | | |
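| | | /** |
| | | * Quartz entry point: connects to the source Oracle and target OraSDO data |
| | | * stores, clears previous output, then runs the conversion for each schema. |
| | | */ |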
| | | public void execute(JobExecutionContext context) throws JobExecutionException |
| | |
| | | logger.info(jobName + " fired at " + new Date()); |
| | | extractJobConfiguration(jobDetail); |
| | | createSourceDataStore(); |
| | | createTargetDataStore(); |
| | | if (getSourceDataStore() == null) |
| | | { |
| | | logger.warn("Cannot connect source oracle database."); |
| | | throw new JobExecutionException("Cannot connect source oracle database."); |
| | | } |
| | | |
| | | if (getTargetDataStore() == null) |
| | | { |
| | | logger.warn("Cannot connect source postgreSQL database."); |
| | | throw new JobExecutionException("Cannot connect source postgreSQL database."); |
| | | } |
| | | |
| | | Calendar cal = Calendar.getInstance(); |
| | | Date startTime = cal.getTime(); |
| | | try |
| | | { |
| | | logger.info("-- step:clearOutputDirectory --"); |
| | | clearOutputDirectory(); |
| | | logger.info("-- step:clearOutputDatabase --"); |
| | | clearOutputDatabase(); |
| | | boolean bFirst = true; |
| | | if (checkConvertDB()) |
| | | { |
| | |
| | | |
| | | for (String orgSchema : _orgSchema) |
| | | { |
| | | OracleConvertOraSDOJobContext jobContext = |
| | | (OracleConvertOraSDOJobContext) prepareJobContext(_filterPath); |
| | | jobContext.setSourceDataStore(getSourceDataStore()); |
| | | jobContext.setConvertElementIn(_convertElementIn); |
| | | jobContext.setElementLogging(checkElementLogging()); |
| | | jobContext.setExecutionContext(context); |
| | | |
| | |
| | | } |
| | | |
| | | disconnect(); |
| | | // take a fresh timestamp; reusing the Calendar from above would return the start time again |
| | | Date endTime = Calendar.getInstance().getTime(); |
| | | Date time = new Date(endTime.getTime() - startTime.getTime()); |
| | | // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss"; |
| | | // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW); |
| | | logger.warn("use time = " + time); |
| | | } catch (SQLException e) |
| | | { |
| | | logger.warn(e.getMessage(), e); |
| | |
| | | * @param jobContext job context |
| | | * @throws SQLException sql exception |
| | | */ |
| | | private void copyConnectivity(OracleConvertOraSDOJobContext jobContext) throws SQLException |
| | | { |
| | | OracleConnection connection = jobContext.getOracleConnection(); |
| | | Statement stmt = connection.createStatement(); |
| | | stmt.execute(AbstractOracleJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK); |
| | | stmt.execute(AbstractOracleJobContext.COPY_CONNECTIVITY_TO_WEBCHECK); |
| | | stmt.close(); // release the statement once the copy has completed |
| | | } |
| | | |
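| | | /** |
| | | * Runs the conversion for one source schema, publishing progress under the |
| | | * "ConvertDgn2OraSDOJobProgress" key of the execution context. |
| | | */ |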
| | | private void exetcuteConvert(OracleConvertOraSDOJobContext jobContext, |
| | | String querySchema, String dataPath) throws SQLException |
| | | { |
| | | int order = 0; |
| | |
| | | |
| | | //jobContext.startTransaction(); |
| | | jobContext.setCurrentSchema(querySchema); |
| | | jobContext.getExecutionContext().put("ConvertDgn2ShpJobProgress", 0); |
| | | jobContext.getExecutionContext().put("ConvertDgn2OraSDOJobProgress", 0); |
| | | for (OrderedMapIterator it = map.orderedMapIterator(); it.hasNext();) |
| | | { |
| | | it.next(); |
| | |
| | | if (now != current) |
| | | { |
| | | current = now; |
| | | jobContext.getExecutionContext().put("ConvertDgn2ShpJobProgress", current); |
| | | jobContext.getExecutionContext().put("ConvertDgn2OraSDOJobProgress", current); |
| | | |
| | | } |
| | | } |
| | | jobContext.getExecutionContext().put("ConvertDgn2ShpJobProgress", 100); |
| | | jobContext.getExecutionContext().put("ConvertDgn2OraSDOJobProgress", 100); |
| | | |
| | | jobContext.commitTransaction(); |
| | | |
| | |
| | | return orderedMap; |
| | | } |
| | | |
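| | | // Streams elements of srcschema.srctable from the source Oracle connection. |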
| | | protected void queryIgsetElement(OracleConvertOraSDOJobContext jobContext, |
| | | String srcschema, String srctable) throws SQLException |
| | | { |
| | | OracleConnection connection = jobContext.getOracleConnection(); |
| | |
| | | stmtSrc.close(); |
| | | } |
| | | |
| | | protected void queryRawElement(OracleConvertOraSDOJobContext jobContext, |
| | | String srcschema, String srctable) throws SQLException |
| | | { |
| | | OracleConnection connection = jobContext.getOracleConnection(); |
| | |
| | | |
| | | for (File dgnFile : dgnFiles) |
| | | { |
| | | IndexDgnConvertOraSDOJobContext convertContext = |
| | | new IndexDgnConvertOraSDOJobContext(getDataPath(), getTargetDataStore()); |
| | | logger.debug("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try |
| | | { |
| | |
| | | } |
| | | } |
| | | |
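| | | // Walks the index DGN file through the context reader and converts each element record. |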
| | | protected void scanIndexDgnElement(IndexDgnConvertOraSDOJobContext convertContext) |
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException |
| | | { |
| | | Dgn7fileReader reader = convertContext.getReader(); |
| | |
| | | logger.debug("ElementRecord Count=" + count); |
| | | } |
| | | |
| | | private void processIndexElement(Element element, IndexDgnConvertOraSDOJobContext convertContext) |
| | | throws IllegalAttributeException, SchemaException |
| | | { |
| | | if (element instanceof TextElement) |
| | | { |
| | |
| | | |
| | | for (File dgnFile : dgnFiles) |
| | | { |
| | | GeneralDgnConvertOraSDOJobContext convertContext = |
| | | new GeneralDgnConvertOraSDOJobContext(getDataPath(), getTargetDataStore()); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try |
| | | { |
| | |
| | | } |
| | | } |
| | | |
| | | public void scanOtherDgnElement(GeneralDgnConvertOraSDOJobContext convertContext) |
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException |
| | | { |
| | | Dgn7fileReader reader = convertContext.getReader(); |
| | |
| | | logger.debug("ElementRecord Count=" + count); |
| | | } |
| | | |
| | | private void processOtherElement(Element element, GeneralDgnConvertOraSDOJobContext convertContext) |
| | | throws IllegalAttributeException, SchemaException |
| | | { |
| | | convertContext.putFeatureCollection(element); |
| | | } |
| | | |
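| | | // Currently a no-op: the old file-based cleanup is kept commented out below for reference. |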
| | | private void clearOutputDatabase() |
| | | { |
| | | /* |
| | | File outDataPath = new File(getDataPath(), OracleConvertOraSDOJobContext.SHPOUTPATH); |
| | | if (outDataPath.exists() && outDataPath.isDirectory()) |
| | | { |
| | | deleteFilesInPath(outDataPath); |
| | |
| | | { |
| | | deleteFilesInPath(outDataPath); |
| | | } |
| | | */ |
| | | } |
| | | |
| | | private void deleteFilesInPath(File outDataPath) |
| | |
| | | |
| | | for (File dgnFile : dgnFiles) |
| | | { |
| | | FeatureDgnConvertOraSDOJobContext convertContext = |
| | | new FeatureDgnConvertOraSDOJobContext(getDataPath(), getTargetDataStore(), _filterPath); |
| | | logger.info("--- start dgnfile-" + dgnFile.toString() + " ---"); |
| | | try |
| | | { |
| | |
| | | } |
| | | } |
| | | |
| | | public void scanFeatureDgnElement(FeatureDgnConvertOraSDOJobContext convertContext) |
| | | throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException |
| | | { |
| | | Dgn7fileReader reader = convertContext.getReader(); |
| | |
| | | logger.debug("ElementRecord Count=" + count); |
| | | } |
| | | |
| | | private void processFeatureElement(Element element, FeatureDgnConvertOraSDOJobContext convertContext) |
| | | throws IllegalAttributeException, SchemaException |
| | | { |
| | | convertContext.putFeatureCollection(element); |
| | |
| | | } |
| | | */ |
| | | } |
| | | |
| | | public DataStore getTargetDataStore() |
| | | { |
| | | return targetDataStore; |
| | | } |
| | | |
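| | | /** |
| | | * (Re)creates the target OracleDataStore from the collected connection |
| | | * properties, applying default connection-pool limits. |
| | | */ |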
| | | protected void createTargetDataStore() throws JobExecutionException |
| | | { |
| | | if (targetDataStore != null) |
| | | { |
| | | targetDataStore.dispose(); |
| | | targetDataStore = null; |
| | | } |
| | | |
| | | /* |
| | | if (!isDriverFound()) |
| | | { |
| | | throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER); |
| | | } |
| | | */ |
| | | |
| | | if (!sdoProperties.containsKey(OracleDataStoreFactory.MAXCONN.key)) |
| | | { |
| | | sdoProperties.put(OracleDataStoreFactory.MAXCONN.key, "2"); |
| | | } |
| | | |
| | | if (!sdoProperties.containsKey(OracleDataStoreFactory.MINCONN.key)) |
| | | { |
| | | sdoProperties.put(OracleDataStoreFactory.MINCONN.key, "1"); |
| | | } |
| | | |
| | | /* |
| | | if (!sdoProperties.containsKey(OracleDataStoreFactory.WKBENABLED.key)) |
| | | { |
| | | sdoProperties.put(OracleDataStoreFactory.WKBENABLED.key, "true"); |
| | | } |
| | | */ |
| | | |
| | | if (!dataStoreFactory.canProcess(sdoProperties)) |
| | | { |
| | | getLogger().warn("cannot process properties-"); |
| | | throw new JobExecutionException("cannot process properties-"); |
| | | } |
| | | try |
| | | { |
| | | targetDataStore = (OracleDataStore) dataStoreFactory.createDataStore(sdoProperties); |
| | | } catch (IOException e) |
| | | { |
| | | getLogger().warn(e.getMessage(), e); |
| | | throw new JobExecutionException(e.getMessage(), e); |
| | | } |
| | | } |
| | | } |