forked from geodmms/xdgnjobs

2008-06-09 bd210ee7438fd203c19d3e8080ea12b79fe56159
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2OraSDOJob.java
@@ -13,15 +13,22 @@
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.Calendar;
import java.util.Date;
import java.util.Map;
import java.util.TreeMap;
import org.apache.commons.collections.OrderedMap;
import org.apache.commons.collections.OrderedMapIterator;
import org.apache.commons.collections.map.LinkedMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.geotools.data.DataStore;
import org.geotools.data.oracle.OracleDataStore;
import org.geotools.data.oracle.OracleDataStoreFactory;
import org.geotools.feature.IllegalAttributeException;
import org.geotools.feature.SchemaException;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
@@ -34,10 +41,10 @@
import oracle.sql.BLOB;
import com.ximple.eofms.jobs.context.AbstractOracleJobContext;
import com.ximple.eofms.jobs.context.orasdo.FeatureDgnConvertOraSDOJobContext;
import com.ximple.eofms.jobs.context.orasdo.GeneralDgnConvertOraSDOJobContext;
import com.ximple.eofms.jobs.context.orasdo.IndexDgnConvertOraSDOJobContext;
import com.ximple.eofms.jobs.context.orasdo.OracleConvertOraSDOJobContext;
import com.ximple.eofms.util.BinConverter;
import com.ximple.eofms.util.ByteArrayCompressor;
import com.ximple.eofms.util.StringUtils;
@@ -55,10 +62,13 @@
{
    final static Log logger = LogFactory.getLog(OracleConvertDgn2OraSDOJob.class);
    /**
     * The Oracle driver class name
     */
    private static final String JDBC_DRIVER = "oracle.jdbc.driver.OracleDriver";
    private static final String SDOHOST = "SDOHOST";
    private static final String SDODDATBASE = "SDODDATBASE";
    private static final String SDOPORT = "SDOPORT";
    private static final String SDOSCHEMA = "SDOSCHEMA";
    private static final String SDOUSER = "SDOUSER";
    private static final String SDOPASS = "SDOPASS";
    private static final String USEWKB = "USEWKB";
    private static final int FETCHSIZE = 30;
    private static final int BATCHSIZE = 25;
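    // Assumed intent (not shown in these hunks): FETCHSIZE is the row fetch size used when
    // reading source elements, and BATCHSIZE the number of records written per batch/commit.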
@@ -76,72 +86,94 @@
        }
    }
    protected static OracleDataStoreFactory dataStoreFactory = new OracleDataStoreFactory();
    GeometryFactory _geomFactory = new GeometryFactory();
    protected String _sdoHost;
    protected String _sdoDatabase;
    protected String _sdoPort;
    protected String _sdoSchema;
    protected String _sdoUsername;
    protected String _sdoPassword;
    protected String _sdoUseWKB;
    protected Map<String, String> sdoProperties;
    protected OracleDataStore targetDataStore;
    public Log getLogger()
    {
        return logger;
    }
    // OracleDataStoreFactory factory = new OracleDataStoreFactory();
    /*
        Map map = new HashMap();
        map.put("host", fixture.getProperty("host"));
        map.put("port", fixture.getProperty("port"));
        map.put("instance", fixture.getProperty("instance"));
        map.put("user", fixture.getProperty("user"));
        map.put("passwd", fixture.getProperty("passwd"));
        map.put("dbtype", "oracle");
        map.put("alias", fixture.getProperty("instance"));
        map.put("namespace", null);
        assertTrue(factory.canProcess(map));
        OracleDataStore store = (OracleDataStore) factory.createDataStore(map);
        assertNull(store.getNameSpace());
        map.put("schema", fixture.getProperty("user").toUpperCase());
        store = (OracleDataStore) factory.createDataStore(map);
        assertNull(store.getNameSpace());
        map.put("namespace", "topp");
        store = (OracleDataStore) factory.createDataStore(map);
        assertEquals(new URI("topp"), store.getNameSpace());
     */
    /*
        FeatureWriter writer = dstore.getFeatureWriter("ORA_TEST_POINTS", Filter.INCLUDE,
                Transaction.AUTO_COMMIT);
        assertNotNull(writer);
        Feature feature = writer.next();
        System.out.println(feature);
        feature.setAttribute(0, "Changed Feature");
        System.out.println(feature);
        writer.write();
        writer.close();
     */
    /*
        Map fidGen = new HashMap();
        fidGen.put("ORA_TEST_POINTS", JDBCDataStoreConfig.FID_GEN_MANUAL_INC);
        JDBCDataStoreConfig config = JDBCDataStoreConfig.createWithSchemaNameAndFIDGenMap(schemaName,
                fidGen);
        String name = "add_name";
        BigDecimal intval = new BigDecimal(70);
        Point point = jtsFactory.createPoint(new Coordinate(-15.0, -25));
        Feature feature = dstore.getSchema("ORA_TEST_POINTS")
                                .create(new Object[] { name, intval, point });
        FeatureStore fs = (FeatureStore) dstore.getFeatureSource("ORA_TEST_POINTS");
        fs.addFeatures(DataUtilities.collection(feature));
     */
    protected AbstractOracleJobContext prepareJobContext(String filterPath)
    {
        return new OracleConvertOraSDOJobContext(getDataPath(), getTargetDataStore(), filterPath);
    }
    protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException
    {
        super.extractJobConfiguration(jobDetail);
        JobDataMap dataMap = jobDetail.getJobDataMap();
        _sdoHost = dataMap.getString(SDOHOST);
        _sdoDatabase = dataMap.getString(SDODDATBASE);
        _sdoPort = dataMap.getString(SDOPORT);
        _sdoSchema = dataMap.getString(SDOSCHEMA);
        _sdoUsername = dataMap.getString(SDOUSER);
        _sdoPassword = dataMap.getString(SDOPASS);
        _sdoUseWKB = dataMap.getString(USEWKB);
        Log logger = getLogger();
        /*
        logger.info("SDOHOST=" + _myHost);
        logger.info("SDODDATBASE=" + _myDatabase);
        logger.info("SDOPORT=" + _myPort);
        logger.info("SDOSCHEMA=" + _mySchema);
        logger.info("SDOUSER=" + _myUsername);
        logger.info("SDOPASS=" + _myPassword);
        logger.info("USEWKB=" + _myUseWKB);
        */
        if (_sdoHost == null)
        {
            logger.warn("SDOHOST is null");
            throw new JobExecutionException("Unknown OraSDO host.");
        }
        if (_sdoDatabase == null)
        {
            logger.warn("PGDATABASE is null");
            throw new JobExecutionException("Unknown OraSDO database.");
        }
        if (_sdoPort == null)
        {
            logger.warn("SDOPORT is null");
            throw new JobExecutionException("Unknown OraSDO port.");
        }
        if (_sdoSchema == null)
        {
            logger.warn("SDOSCHEMA is null");
            throw new JobExecutionException("Unknown OraSDO schema.");
        }
        if (_sdoUsername == null)
        {
            logger.warn("PGUSERNAME is null");
            throw new JobExecutionException("Unknown OraSDO username.");
        }
        if (_sdoPassword == null)
        {
            logger.warn("PGPASSWORD is null");
            throw new JobExecutionException("Unknown OraSDO password.");
        }
        Map<String, String> remote = new TreeMap<String, String>();
        remote.put("dbtype", "OraSDO");
        remote.put("charset", "UTF-8");
        remote.put("host", _sdoHost);
        remote.put("port", _sdoPort);
        remote.put("database", _sdoDatabase);
        remote.put("user", _sdoUsername);
        remote.put("passwd", _sdoPassword);
        remote.put("namespace", null);
        sdoProperties = remote;
    }
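    /*
        Hypothetical sketch (not part of the original job) of how the SDO* keys read above
        could be supplied when scheduling this job with Quartz 1.x; all values are placeholders:

        JobDetail jobDetail = new JobDetail("convertDgn2OraSDO", "DEFAULT",
                OracleConvertDgn2OraSDOJob.class);
        JobDataMap dataMap = jobDetail.getJobDataMap();
        dataMap.put(SDOHOST, "localhost");
        dataMap.put(SDOPORT, "1521");
        dataMap.put(SDODDATBASE, "ORCL");
        dataMap.put(SDOSCHEMA, "SPATIALDB");
        dataMap.put(SDOUSER, "spatialdb");
        dataMap.put(SDOPASS, "spatialdb");
        dataMap.put(USEWKB, "true");
     */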
    public void execute(JobExecutionContext context) throws JobExecutionException
@@ -156,15 +188,25 @@
        logger.info(jobName + " fired at " + new Date());
        extractJobConfiguration(jobDetail);
        createSourceDataStore();
        createTargetDataStore();
        if (getSourceDataStore() == null)
        {
            logger.warn("Cannot connect source oracle database.");
            throw new JobExecutionException("Cannot connect source oracle database.");
        }
        if (getTargetDataStore() == null)
        {
            logger.warn("Cannot connect source postgreSQL database.");
            throw new JobExecutionException("Cannot connect source postgreSQL database.");
        }
        Calendar cal = Calendar.getInstance();
        Date startTime = cal.getTime();
        try
        {
            logger.info("-- step:clearOutputDirectory --");
            clearOutputDirectory();
            logger.info("-- step:clearOutputDatabase --");
            clearOutputDatabase();
            boolean bFirst = true;
            if (checkConvertDB())
            {
@@ -172,9 +214,10 @@
                for (String orgSchema : _orgSchema)
                {
                    OracleConvertOraSDOJobContext jobContext =
                            (OracleConvertOraSDOJobContext) prepareJobContext(_filterPath);
                    jobContext.setSourceDataStore(getSourceDataStore());
                    // jobContext.setConvertElementIn(_convertElementIn);
                    jobContext.setElementLogging(checkElementLogging());
                    jobContext.setExecutionContext(context);
@@ -212,6 +255,11 @@
            }
            disconnect();
            Date endTime = Calendar.getInstance().getTime();
            long elapsed = endTime.getTime() - startTime.getTime();
            // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss";
            // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW);
            logger.warn("use time = " + elapsed + " ms");
        } catch (SQLException e)
        {
            logger.warn(e.getMessage(), e);
@@ -230,15 +278,15 @@
     * @param jobContext job context
     * @throws SQLException sql exception
     */
    private void copyConnectivity(OracleConvertOraSDOJobContext jobContext) throws SQLException
    {
        OracleConnection connection = jobContext.getOracleConnection();
        Statement stmt = connection.createStatement();
        stmt.execute(AbstractOracleJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK);
        stmt.execute(AbstractOracleJobContext.COPY_CONNECTIVITY_TO_WEBCHECK);
    }
    private void exetcuteConvert(OracleConvertOraSDOJobContext jobContext,
                                 String querySchema, String dataPath) throws SQLException
    {
        int order = 0;
@@ -253,7 +301,7 @@
        //jobContext.startTransaction();
        jobContext.setCurrentSchema(querySchema);
        jobContext.getExecutionContext().put("ConvertDgn2ShpJobProgress", 0);
        jobContext.getExecutionContext().put("ConvertDgn2OraSDOJobProgress", 0);
        for (OrderedMapIterator it = map.orderedMapIterator(); it.hasNext();)
        {
            it.next();
@@ -285,11 +333,11 @@
            if (now != current)
            {
                current = now;
                jobContext.getExecutionContext().put("ConvertDgn2ShpJobProgress", current);
                jobContext.getExecutionContext().put("ConvertDgn2OraSDOJobProgress", current);
            }
        }
        jobContext.getExecutionContext().put("ConvertDgn2ShpJobProgress", 100);
        jobContext.getExecutionContext().put("ConvertDgn2OraSDOJobProgress", 100);
        jobContext.commitTransaction();
@@ -387,7 +435,7 @@
        return orderedMap;
    }
    protected void queryIgsetElement(OracleConvertOraSDOJobContext jobContext,
                                     String srcschema, String srctable) throws SQLException
    {
        OracleConnection connection = jobContext.getOracleConnection();
@@ -429,7 +477,7 @@
        stmtSrc.close();
    }
    protected void queryRawElement(OracleConvertOraSDOJobContext jobContext,
                                   String srcschema, String srctable) throws SQLException
    {
        OracleConnection connection = jobContext.getOracleConnection();
@@ -568,7 +616,8 @@
        for (File dgnFile : dgnFiles)
        {
            IndexDgnConvertOraSDOJobContext convertContext =
                    new IndexDgnConvertOraSDOJobContext(getDataPath(), getTargetDataStore());
            logger.debug("--- start dgnfile-" + dgnFile.toString() + " ---");
            try
            {
@@ -615,7 +664,7 @@
        }
    }
    protected void scanIndexDgnElement(IndexDgnConvertOraSDOJobContext convertContext)
            throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException
    {
        Dgn7fileReader reader = convertContext.getReader();
@@ -658,7 +707,8 @@
        logger.debug("ElementRecord Count=" + count);
    }
    private void processIndexElement(Element element, IndexDgnConvertOraSDOJobContext convertContext)
            throws IllegalAttributeException, SchemaException
    {
        if (element instanceof TextElement)
        {
@@ -698,7 +748,8 @@
        for (File dgnFile : dgnFiles)
        {
            GeneralDgnConvertOraSDOJobContext convertContext =
                    new GeneralDgnConvertOraSDOJobContext(getDataPath(), getTargetDataStore());
            logger.info("--- start dgnfile-" + dgnFile.toString() + " ---");
            try
            {
@@ -745,7 +796,7 @@
        }
    }
    public void scanOtherDgnElement(GeneralDgnConvertOraSDOJobContext convertContext)
            throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException
    {
        Dgn7fileReader reader = convertContext.getReader();
@@ -788,15 +839,16 @@
        logger.debug("ElementRecord Count=" + count);
    }
    private void processOtherElement(Element element, GeneralDgnConvertOraSDOJobContext convertContext)
            throws IllegalAttributeException, SchemaException
    {
        convertContext.putFeatureCollection(element);
    }
    private void clearOutputDatabase()
    {
        /*
        File outDataPath = new File(getDataPath(), OracleConvertOraSDOJobContext.SHPOUTPATH);
        if (outDataPath.exists() && outDataPath.isDirectory())
        {
            deleteFilesInPath(outDataPath);
@@ -811,6 +863,7 @@
        {
            deleteFilesInPath(outDataPath);
        }
        */
    }
    private void deleteFilesInPath(File outDataPath)
@@ -871,7 +924,8 @@
        for (File dgnFile : dgnFiles)
        {
            FeatureDgnConvertOraSDOJobContext convertContext =
                    new FeatureDgnConvertOraSDOJobContext(getDataPath(), getTargetDataStore(), _filterPath);
            logger.info("--- start dgnfile-" + dgnFile.toString() + " ---");
            try
            {
@@ -918,7 +972,7 @@
        }
    }
    public void scanFeatureDgnElement(FeatureDgnConvertOraSDOJobContext convertContext)
            throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException
    {
        Dgn7fileReader reader = convertContext.getReader();
@@ -961,7 +1015,7 @@
        logger.debug("ElementRecord Count=" + count);
    }
    private void processFeatureElement(Element element, FeatureDgnConvertOraSDOJobContext convertContext)
            throws IllegalAttributeException, SchemaException
    {
        convertContext.putFeatureCollection(element);
@@ -982,4 +1036,56 @@
        }
        */
    }
    public DataStore getTargetDataStore()
    {
        return targetDataStore;
    }
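    /**
     * Builds the target OracleDataStore from the SDO connection properties collected in
     * extractJobConfiguration, disposing any previously created instance first.
     *
     * @throws JobExecutionException if the properties cannot be processed or the data store
     *                               cannot be created
     */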
    protected void createTargetDataStore() throws JobExecutionException
    {
        if (targetDataStore != null)
        {
            targetDataStore.dispose();
            targetDataStore = null;
        }
        /*
        if (!isDriverFound())
        {
            throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER);
        }
        */
        if (!sdoProperties.containsKey(OracleDataStoreFactory.MAXCONN.key))
        {
            sdoProperties.put(OracleDataStoreFactory.MAXCONN.key, "2");
        }
        if (!sdoProperties.containsKey(OracleDataStoreFactory.MINCONN.key))
        {
            sdoProperties.put(OracleDataStoreFactory.MINCONN.key, "1");
        }
        /*
        if (!sdoProperties.containsKey(OracleDataStoreFactory.WKBENABLED.key))
        {
            sdoProperties.put(OracleDataStoreFactory.WKBENABLED.key, "true");
        }
        */
        if (!dataStoreFactory.canProcess(sdoProperties))
        {
            getLogger().warn("cannot process properties-");
            throw new JobExecutionException("cannot process properties-");
        }
        try
        {
            targetDataStore = (OracleDataStore) dataStoreFactory.createDataStore(sdoProperties);
        } catch (IOException e)
        {
            getLogger().warn(e.getMessage(), e);
            throw new JobExecutionException(e.getMessage(), e);
        }
    }
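    /*
        Rough usage sketch (an assumption, mirroring the commented GeoTools snippets above)
        of exercising the target store once createTargetDataStore() has succeeded;
        "ORA_TEST_POINTS" is a placeholder type name:

        String[] typeNames = targetDataStore.getTypeNames();
        FeatureWriter writer = targetDataStore.getFeatureWriterAppend("ORA_TEST_POINTS",
                Transaction.AUTO_COMMIT);
        // append converted features via writer.next() / writer.write()
        writer.close();
        targetDataStore.dispose();
     */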
}