forked from geodmms/xdgnjobs

?? ?
2008-06-06 aaf4c6c3a1d50b67b9a7dfd1bc011615ba5d9f57
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2PostGISJob.java
@@ -1,56 +1,62 @@
package com.ximple.eofms.jobs;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FilenameFilter;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.Calendar;
import java.util.Date;
import java.util.Map;
import java.util.TreeMap;

import com.vividsolutions.jts.geom.GeometryFactory;
import com.ximple.eofms.jobs.context.AbstractOracleJobContext;
import com.ximple.eofms.jobs.context.FeatureDgnConvertShpJobContext;
import com.ximple.eofms.jobs.context.GeneralDgnConvertShpJobContext;
import com.ximple.eofms.jobs.context.IndexDgnConvertShpJobContext;
import com.ximple.eofms.jobs.context.OracleConvertShapefilesJobContext;
import com.ximple.eofms.jobs.context.postgis.FeatureDgnConvertPostGISJobContext;
import com.ximple.eofms.jobs.context.postgis.GeneralDgnConvertPostGISJobContext;
import com.ximple.eofms.jobs.context.postgis.IndexDgnConvertPostGISJobContext;
import com.ximple.eofms.jobs.context.postgis.OracleConvertPostGISJobContext;
import com.ximple.eofms.util.BinConverter;
import com.ximple.eofms.util.ByteArrayCompressor;
import com.ximple.eofms.util.StringUtils;
import com.ximple.io.dgn7.ComplexElement;
import com.ximple.io.dgn7.Dgn7fileException;
import com.ximple.io.dgn7.Dgn7fileReader;
import com.ximple.io.dgn7.Element;
import com.ximple.io.dgn7.ElementType;
import com.ximple.io.dgn7.IElementHandler;
import com.ximple.io.dgn7.Lock;
import com.ximple.io.dgn7.TextElement;
import com.ximple.util.PrintfFormat;

import oracle.jdbc.OracleConnection;
import oracle.jdbc.OracleResultSet;
import oracle.sql.ARRAY;
import oracle.sql.BLOB;

import org.apache.commons.collections.OrderedMap;
import org.apache.commons.collections.OrderedMapIterator;
import org.apache.commons.collections.map.LinkedMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.geotools.data.DataStore;
import org.geotools.data.oracle.OracleDataStore;
import org.geotools.data.postgis.PostgisDataStore;
import org.geotools.data.postgis.PostgisDataStoreFactory;
import org.geotools.feature.IllegalAttributeException;
import org.geotools.feature.SchemaException;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
public class OracleConvertDgn2PostGISJob extends AbstractOracleDatabaseJob
{
@@ -61,7 +67,7 @@
    private static final String PGPORT = "PGPORT";
    private static final String PGSCHEMA = "PGSCHEMA";
    private static final String PGUSER = "PGUSER";
    private static final String PGPASS = " PGPASS";
    private static final String PGPASS = "PGPASS";
    private static final String USEWKB = "USEWKB";
    private static final int FETCHSIZE = 30;
@@ -80,6 +86,8 @@
        }
    }
    protected static PostgisDataStoreFactory dataStoreFactory = new PostgisDataStoreFactory();
    GeometryFactory _geomFactory = new GeometryFactory();
    protected String _pgHost;
    protected String _pgDatabase;
@@ -89,30 +97,20 @@
    protected String _pgPassword;
    protected String _pgUseWKB;
    // static PostgisDataStoreFactory factory = new PostgisDataStoreFactory();
    protected Map pgProperties;
    protected PostgisDataStore targetDataStore;
    /*
        f = PostgisTests.newFixture();
        remote = new HashMap();
        remote.put("dbtype", "postgis");
        remote.put("charset", "");
        remote.put("host", f.host);
        remote.put("port", f.port);
        remote.put("database", f.database);
        remote.put("user", f.user);
        remote.put("passwd", f.password);
        remote.put("namespace", f.namespace);
     */
    /*
        PostgisDataStore pg = new PostgisDataStore(pool, f.schema, getName(),
                PostgisDataStore.OPTIMIZE_SQL);
        pg.setWKBEnabled(WKB_ENABLED);
        pg.setEstimatedExtent(true);
        pg.setFIDMapper("road", new TypedFIDMapper(new BasicFIDMapper("fid", 255, false), "road"));
        pg.setFIDMapper("river", new TypedFIDMapper(new BasicFIDMapper("fid", 255, false), "river"));
        pg.setFIDMapper("testset",
            new TypedFIDMapper(new BasicFIDMapper("gid", 255, true), "testset"));
     */
       PostgisDataStore pg = new PostgisDataStore(pool, f.schema, getName(),
               PostgisDataStore.OPTIMIZE_SQL);
       pg.setWKBEnabled(WKB_ENABLED);
       pg.setEstimatedExtent(true);
       pg.setFIDMapper("road", new TypedFIDMapper(new BasicFIDMapper("fid", 255, false), "road"));
       pg.setFIDMapper("river", new TypedFIDMapper(new BasicFIDMapper("fid", 255, false), "river"));
       pg.setFIDMapper("testset",
           new TypedFIDMapper(new BasicFIDMapper("gid", 255, true), "testset"));
    */
    /*
        Transaction transaction = new DefaultTransaction("attemptWriteFW");
        FeatureWriter writer = ds.getFeatureWriter(table, transaction);
@@ -139,7 +137,7 @@
    protected AbstractOracleJobContext prepareJobContext(String filterPath)
    {
        return new OracleConvertShapefilesJobContext(filterPath);
        return new OracleConvertPostGISJobContext(getDataPath(), getTargetDataStore(), filterPath);
    }
    protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException
@@ -155,6 +153,7 @@
        _pgUseWKB = dataMap.getString(USEWKB);
        Log logger = getLogger();
        /*
        logger.info("PGHOST=" + _pgHost);
        logger.info("PGDDATBASE=" + _pgDatabase);
        logger.info("PGPORT=" + _pgPort);
@@ -162,31 +161,49 @@
        logger.info("PGUSER=" + _pgUsername);
        logger.info("PGPASS=" + _pgPassword);
        logger.info("USEWKB=" + _pgUseWKB);
        */
        if (_pgHost == null)
        {
            logger.warn("PGHOST is null");
            throw new JobExecutionException("Unknown PostGIS host.");
        }
        if (_pgDatabase == null)
        {
            logger.warn("PGDATABASE is null");
            throw new JobExecutionException("Unknown PostGIS database.");
        }
        if (_pgPort == null)
        {
            logger.warn("PGPORT is null");
            throw new JobExecutionException("Unknown PostGIS port.");
        }
        if (_pgSchema == null)
        {
            logger.warn("PGSCHEMA is null");
            throw new JobExecutionException("Unknown PostGIS schema.");
        }
        if (_pgUsername == null)
        {
            logger.warn("PGUSERNAME is null");
            throw new JobExecutionException("Unknown PostGIS username.");
        }
        if (_pgPassword == null)
        {
            logger.warn("PGPASSWORD is null");
            throw new JobExecutionException("Unknown PostGIS password.");
        }
        Map<String, String> remote = new TreeMap<String, String>();
        remote.put("dbtype", "postgis");
        remote.put("charset", "UTF-8");
        remote.put("host", _pgHost);
        remote.put("port", _pgPort);
        remote.put("database", _pgDatabase);
        remote.put("user", _pgUsername);
        remote.put("passwd", _pgPassword);
        remote.put("namespace", null);
        pgProperties = remote;
    }
    public void execute(JobExecutionContext context) throws JobExecutionException
@@ -200,11 +217,26 @@
        // Log the time the job started
        logger.info(jobName + " fired at " + new Date());
        extractJobConfiguration(jobDetail);
        createSourceDataStore();
        createTargetDataStore();
        if (getSourceDataStore() == null)
        {
            logger.warn("Cannot connect source oracle database.");
            throw new JobExecutionException("Cannot connect source oracle database.");
        }
        if (getTargetDataStore() == null)
        {
            logger.warn("Cannot connect source postgreSQL database.");
            throw new JobExecutionException("Cannot connect source postgreSQL database.");
        }
        Calendar cal = Calendar.getInstance();
        Date startTime = cal.getTime();
        try
        {
            logger.info("-- step:clearOutputDirectory --");
            clearOutputDirectory();
            logger.info("-- step:clearOutputDatabase --");
            clearOutputDatabase();
            boolean bFirst = true;
            if (checkConvertDB())
            {
@@ -212,12 +244,9 @@
                for (String orgSchema : _orgSchema)
                {
                    OracleConvertShapefilesJobContext jobContext = (OracleConvertShapefilesJobContext) prepareJobContext(_filterPath);
                    jobContext.setConnectionInfo(_oracleHost, _oraclePort, _oracleInstance);
                    jobContext.setLogin(_username, _password);
                    jobContext.setShapeData(_dataPath);
                    jobContext.setConvertDB(_convertDB);
                    jobContext.setConvertFile(_convertFile);
                    OracleConvertPostGISJobContext jobContext =
                            (OracleConvertPostGISJobContext) prepareJobContext(_filterPath);
                    jobContext.setSourceDataStore(getSourceDataStore());
                    jobContext.setConvertElementIn(_convertElementIn);
                    jobContext.setElementLogging(checkElementLogging());
                    jobContext.setExecutionContext(context);
@@ -254,6 +283,13 @@
                logger.info("-- step:createDummyFeatureFile --");
                createDummyFeatureFile(context);
            }
            disconnect();
            Date endTime = cal.getTime();
            Date time = new Date(endTime.getTime() - startTime.getTime());
            // public static final String DATE_FORMAT_NOW = "yyyy-MM-dd HH:mm:ss";
            // SimpleDateFormat sdf = new SimpleDateFormat(DATE_FORMAT_NOW);
            logger.warn("use time = " + time);
        } catch (SQLException e)
        {
            logger.warn(e.getMessage(), e);
@@ -272,15 +308,15 @@
     * @param jobContext job context
     * @throws SQLException sql exception
     */
    private void copyConnectivity(OracleConvertShapefilesJobContext jobContext) throws SQLException
    private void copyConnectivity(OracleConvertPostGISJobContext jobContext) throws SQLException
    {
        OracleConnection connection = jobContext.getOracleConnection();
        Statement stmt = connection.createStatement();
        stmt.execute(OracleConvertShapefilesJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK);
        stmt.execute(OracleConvertShapefilesJobContext.COPY_CONNECTIVITY_TO_WEBCHECK);
        stmt.execute(AbstractOracleJobContext.TRUNCATE_CONNECTIVITY_WEBCHECK);
        stmt.execute(AbstractOracleJobContext.COPY_CONNECTIVITY_TO_WEBCHECK);
    }
    private void exetcuteConvert(OracleConvertShapefilesJobContext jobContext,
    private void exetcuteConvert(OracleConvertPostGISJobContext jobContext,
                                 String querySchema, String dataPath) throws SQLException
    {
        int order = 0;
@@ -429,7 +465,7 @@
        return orderedMap;
    }
    protected void queryIgsetElement(OracleConvertShapefilesJobContext jobContext,
    protected void queryIgsetElement(OracleConvertPostGISJobContext jobContext,
                                     String srcschema, String srctable) throws SQLException
    {
        OracleConnection connection = jobContext.getOracleConnection();
@@ -471,7 +507,7 @@
        stmtSrc.close();
    }
    protected void queryRawElement(OracleConvertShapefilesJobContext jobContext,
    protected void queryRawElement(OracleConvertPostGISJobContext jobContext,
                                   String srcschema, String srctable) throws SQLException
    {
        OracleConnection connection = jobContext.getOracleConnection();
@@ -583,7 +619,8 @@
     * Performs the job of converting the index design (DGN) files.
     * (Original comment was Big5-encoded Chinese that was mojibaked in this
     * encoding; translated to English.)
     *
     * @param context the job execution context
     * @throws org.quartz.JobExecutionException
     *          exception
     */
    private void convertIndexDesignFile(JobExecutionContext context) throws JobExecutionException
    {
@@ -609,7 +646,8 @@
        for (File dgnFile : dgnFiles)
        {
            IndexDgnConvertShpJobContext convertContext = new IndexDgnConvertShpJobContext(getDataPath());
            IndexDgnConvertPostGISJobContext convertContext =
                    new IndexDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore());
            logger.debug("--- start dgnfile-" + dgnFile.toString() + " ---");
            try
            {
@@ -656,7 +694,7 @@
        }
    }
    protected void scanIndexDgnElement(IndexDgnConvertShpJobContext convertContext)
    protected void scanIndexDgnElement(IndexDgnConvertPostGISJobContext convertContext)
            throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException
    {
        Dgn7fileReader reader = convertContext.getReader();
@@ -699,7 +737,8 @@
        logger.debug("ElementRecord Count=" + count);
    }
    private void processIndexElement(Element element, IndexDgnConvertShpJobContext convertContext) throws IllegalAttributeException, SchemaException
    private void processIndexElement(Element element, IndexDgnConvertPostGISJobContext convertContext)
            throws IllegalAttributeException, SchemaException
    {
        if (element instanceof TextElement)
        {
@@ -712,7 +751,8 @@
     * Performs the job of converting the other design (DGN) files.
     * (Original comment was Big5-encoded Chinese that was mojibaked in this
     * encoding; translated to English.)
     *
     * @param context jobContext
     * @throws org.quartz.JobExecutionException
     *          exception
     */
    private void convertOtherDesignFile(JobExecutionContext context) throws JobExecutionException
    {
@@ -738,7 +778,8 @@
        for (File dgnFile : dgnFiles)
        {
            GeneralDgnConvertShpJobContext convertContext = new GeneralDgnConvertShpJobContext(getDataPath());
            GeneralDgnConvertPostGISJobContext convertContext =
                    new GeneralDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore());
            logger.info("--- start dgnfile-" + dgnFile.toString() + " ---");
            try
            {
@@ -785,7 +826,7 @@
        }
    }
    public void scanOtherDgnElement(GeneralDgnConvertShpJobContext convertContext)
    public void scanOtherDgnElement(GeneralDgnConvertPostGISJobContext convertContext)
            throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException
    {
        Dgn7fileReader reader = convertContext.getReader();
@@ -828,15 +869,16 @@
        logger.debug("ElementRecord Count=" + count);
    }
    private void processOtherElement(Element element, GeneralDgnConvertShpJobContext convertContext)
    private void processOtherElement(Element element, GeneralDgnConvertPostGISJobContext convertContext)
            throws IllegalAttributeException, SchemaException
    {
        convertContext.putFeatureCollection(element);
    }
    private void clearOutputDirectory()
    private void clearOutputDatabase()
    {
        File outDataPath = new File(getDataPath(), OracleConvertShapefilesJobContext.SHPOUTPATH);
        /*
        File outDataPath = new File(getDataPath(), OracleConvertPostGISJobContext.SHPOUTPATH);
        if (outDataPath.exists() && outDataPath.isDirectory())
        {
            deleteFilesInPath(outDataPath);
@@ -851,6 +893,7 @@
        {
            deleteFilesInPath(outDataPath);
        }
        */
    }
    private void deleteFilesInPath(File outDataPath)
@@ -911,7 +954,8 @@
        for (File dgnFile : dgnFiles)
        {
            FeatureDgnConvertShpJobContext convertContext = new FeatureDgnConvertShpJobContext(getDataPath(), _filterPath);
            FeatureDgnConvertPostGISJobContext convertContext =
                    new FeatureDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), _filterPath);
            logger.info("--- start dgnfile-" + dgnFile.toString() + " ---");
            try
            {
@@ -958,7 +1002,7 @@
        }
    }
    public void scanFeatureDgnElement(FeatureDgnConvertShpJobContext convertContext)
    public void scanFeatureDgnElement(FeatureDgnConvertPostGISJobContext convertContext)
            throws Dgn7fileException, IOException, IllegalAttributeException, SchemaException
    {
        Dgn7fileReader reader = convertContext.getReader();
@@ -1001,7 +1045,7 @@
        logger.debug("ElementRecord Count=" + count);
    }
    private void processFeatureElement(Element element, FeatureDgnConvertShpJobContext convertContext)
    private void processFeatureElement(Element element, FeatureDgnConvertPostGISJobContext convertContext)
            throws IllegalAttributeException, SchemaException
    {
        convertContext.putFeatureCollection(element);
@@ -1022,4 +1066,39 @@
        }
        */
    }
    /**
     * Returns the PostGIS data store that conversion output is written to.
     *
     * @return the target data store, or {@code null} if
     *         {@code createTargetDataStore()} has not been called yet or the
     *         store has been disposed
     */
    public DataStore getTargetDataStore()
    {
        return targetDataStore;
    }
    protected void createTargetDataStore() throws JobExecutionException
    {
        if (targetDataStore != null)
        {
            targetDataStore.dispose();
            targetDataStore = null;
        }
        /*
        if (!isDriverFound())
        {
            throw new JobExecutionException("Oracle JDBC Driver not found.-" + JDBC_DRIVER);
        }
        */
        if (!dataStoreFactory.canProcess(pgProperties))
        {
            getLogger().warn("cannot process properties-");
            throw new JobExecutionException("cannot process properties-");
        }
        try
        {
            targetDataStore = (PostgisDataStore) dataStoreFactory.createDataStore(pgProperties);
        } catch (IOException e)
        {
            getLogger().warn(e.getMessage(), e);
            throw new JobExecutionException(e.getMessage(), e);
        }
    }
}