forked from geodmms/xdgnjobs

2008-06-16 aa92e40672e9ea2a6b55f528d5b986314d652a40
update for EOFM-122
9 files modified
162 lines changed
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2MySQLJob.java (13 lines changed)
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2OraSDOJob.java (14 lines changed)
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2PostGISJob.java (15 lines changed)
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2ShpJob.java (11 lines changed)
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleElementLogger.java (23 lines changed)
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/OracleConvertMySQLJobContext.java (40 lines changed)
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/orasdo/OracleConvertOraSDOJobContext.java (12 lines changed)
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/AbstractOracleToPostGISJobContext.java (2 lines changed)
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/OracleConvertPostGISJobContext.java (32 lines changed)
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2MySQLJob.java
@@ -72,7 +72,6 @@
    private static final String USEWKB = "USEWKB";
    private static final int FETCHSIZE = 30;
    private static final int BATCHSIZE = 25;
    private static final int COMMITSIZE = 20;
    class Pair
@@ -362,10 +361,9 @@
        try
        {
            rs = stmt.executeQuery(fetchStmt);
            int size = rs.getMetaData().getColumnCount();
            while (rs.next())
            {
                int size = rs.getMetaData().getColumnCount();
                Object[] values = new Object[size];
                for (int i = 0; i < size; i++)
@@ -409,10 +407,9 @@
        stmt.setFetchSize(FETCHSIZE);
        ResultSet rs = stmt.executeQuery(fetchStmt);
        int size = rs.getMetaData().getColumnCount();
        while (rs.next())
        {
            int size = rs.getMetaData().getColumnCount();
            Object[] values = new Object[size];
            for (int i = 0; i < size; i++)
@@ -448,12 +445,12 @@
        stmtSrc.setFetchSize(FETCHSIZE);
        ResultSet rsSrc = stmtSrc.executeQuery(fetchSrcStmt);
        int igdsMetaType = rsSrc.getMetaData().getColumnType(1);
        while (rsSrc.next())
        {
            byte[] raw = null;
            byte[] raw;
            if (rsSrc.getMetaData().getColumnType(1) == Types.BLOB)
            if (igdsMetaType == Types.BLOB)
            {
                BLOB blob = (BLOB) rsSrc.getBlob(1);
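The recurring change in this job (and repeated in the OracleConvertDgn2OraSDOJob, OracleConvertDgn2PostGISJob and OracleConvertDgn2ShpJob diffs below) is to hoist the ResultSetMetaData lookups out of the row loop and to drop the redundant null initializer on the raw buffer. A condensed sketch of the resulting loop shape follows; it is illustrative only, merges the two fetch loops from the diff into one method, and uses the portable java.sql.Blob API rather than the oracle.sql.BLOB cast in the real code:

import java.sql.Blob;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;

// Illustrative only: mirrors the loop shape after this commit, not the project's actual class.
public class FetchLoopHoistSketch
{
    private static final int FETCHSIZE = 30;

    static void scanElements(Connection connection, String fetchStmt) throws SQLException
    {
        Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY,
                                                    ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(FETCHSIZE);
        ResultSet rs = stmt.executeQuery(fetchStmt);

        // Hoisted out of the loop: column count and column type do not change between rows.
        int size = rs.getMetaData().getColumnCount();
        int igdsMetaType = rs.getMetaData().getColumnType(1);

        while (rs.next())
        {
            Object[] values = new Object[size];
            for (int i = 0; i < size; i++)
            {
                values[i] = rs.getObject(i + 1);   // JDBC columns are 1-based
            }

            byte[] raw;                            // assigned on every branch, so no null initializer
            if (igdsMetaType == Types.BLOB)
            {
                Blob blob = rs.getBlob(1);
                raw = blob.getBytes(1, (int) blob.length());
            }
            else
            {
                raw = rs.getBytes(1);
            }
            // ... hand values/raw to the element converter ...
        }
        rs.close();
        stmt.close();
    }
}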
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2OraSDOJob.java
@@ -72,7 +72,6 @@
    private static final String USEWKB = "USEWKB";
    private static final int FETCHSIZE = 30;
    private static final int BATCHSIZE = 25;
    private static final int COMMITSIZE = 20;
    class Pair
@@ -362,10 +361,9 @@
        try
        {
            rs = stmt.executeQuery(fetchStmt);
            int size = rs.getMetaData().getColumnCount();
            while (rs.next())
            {
                int size = rs.getMetaData().getColumnCount();
                Object[] values = new Object[size];
                for (int i = 0; i < size; i++)
@@ -409,10 +407,9 @@
        stmt.setFetchSize(FETCHSIZE);
        ResultSet rs = stmt.executeQuery(fetchStmt);
        int size = rs.getMetaData().getColumnCount();
        while (rs.next())
        {
            int size = rs.getMetaData().getColumnCount();
            Object[] values = new Object[size];
            for (int i = 0; i < size; i++)
@@ -448,12 +445,11 @@
        stmtSrc.setFetchSize(FETCHSIZE);
        ResultSet rsSrc = stmtSrc.executeQuery(fetchSrcStmt);
        int igdsMetaType = rsSrc.getMetaData().getColumnType(1);
        while (rsSrc.next())
        {
            byte[] raw = null;
            if (rsSrc.getMetaData().getColumnType(1) == Types.BLOB)
            byte[] raw;
            if (igdsMetaType == Types.BLOB)
            {
                BLOB blob = (BLOB) rsSrc.getBlob(1);
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2PostGISJob.java
@@ -9,12 +9,11 @@
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.sql.Connection;
import java.util.Calendar;
import java.util.Date;
import java.util.Map;
import java.util.TreeMap;
@@ -363,14 +362,13 @@
        ResultSet rs = null;
        stmt.setFetchSize(FETCHSIZE);
        int size = rs.getMetaData().getColumnCount();
        try
        {
            rs = stmt.executeQuery(fetchStmt);
            while (rs.next())
            {
                int size = rs.getMetaData().getColumnCount();
                Object[] values = new Object[size];
                for (int i = 0; i < size; i++)
@@ -415,9 +413,9 @@
        ResultSet rs = stmt.executeQuery(fetchStmt);
        try
        {
            int size = rs.getMetaData().getColumnCount();
        while (rs.next())
        {
            int size = rs.getMetaData().getColumnCount();
            Object[] values = new Object[size];
            for (int i = 0; i < size; i++)
@@ -453,12 +451,11 @@
        stmtSrc.setFetchSize(FETCHSIZE);
        ResultSet rsSrc = stmtSrc.executeQuery(fetchSrcStmt);
        int igdsMetaType = rsSrc.getMetaData().getColumnType(1);
        while (rsSrc.next())
        {
            byte[] raw = null;
            if (rsSrc.getMetaData().getColumnType(1) == Types.BLOB)
            byte[] raw;
            if (igdsMetaType == Types.BLOB)
            {
                BLOB blob = (BLOB) rsSrc.getBlob(1);
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2ShpJob.java
@@ -264,10 +264,10 @@
        try
        {
            rs = stmt.executeQuery(fetchStmt);
            int size = rs.getMetaData().getColumnCount();
            while (rs.next())
            {
                int size = rs.getMetaData().getColumnCount();
                Object[] values = new Object[size];
                for (int i = 0; i < size; i++)
@@ -309,12 +309,11 @@
        Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        stmt.setFetchSize(FETCHSIZE);
        ResultSet rs = stmt.executeQuery(fetchStmt);
        int size = rs.getMetaData().getColumnCount();
        while (rs.next())
        {
            int size = rs.getMetaData().getColumnCount();
            Object[] values = new Object[size];
            for (int i = 0; i < size; i++)
@@ -350,12 +349,12 @@
        stmtSrc.setFetchSize(FETCHSIZE);
        ResultSet rsSrc = stmtSrc.executeQuery(fetchSrcStmt);
        int igdsMetaType = rsSrc.getMetaData().getColumnType(1);
        while (rsSrc.next())
        {
            byte[] raw = null;
            byte[] raw;
            if (rsSrc.getMetaData().getColumnType(1) == Types.BLOB)
            if (igdsMetaType == Types.BLOB)
            {
                BLOB blob = (BLOB) rsSrc.getBlob(1);
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleElementLogger.java
@@ -11,6 +11,7 @@
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -45,6 +46,7 @@
    private FileOutputStream fos = null;
    private FileChannel fch = null;
    private int logCount = 0;
    private ArrayList<byte[]> dgnFileHeader = null;
    public OracleElementLogger(Connection connection)
    {
@@ -226,6 +228,17 @@
            logger.warn("connection is null");
            return;
        }
        if (dgnFileHeader != null)
        {
            for (byte[] raw : dgnFileHeader)
            {
                putElementIntoStream(raw);
            }
            return;
        }
        dgnFileHeader = new ArrayList<byte[]>();
        String fetchSrcStmtFmt = "SELECT IGDSELM FROM \"%s\".\"%s\" ORDER BY ROWID";
        PrintfFormat spf = new PrintfFormat(fetchSrcStmtFmt);
        String fetchSrcStmt = spf.sprintf(new Object[]{currentSchema, TAB_IGDSSEED});
@@ -249,9 +262,12 @@
            if (raw != null)
            {
                dgnFileHeader.add(raw);
                putElementIntoStream(raw);
            }
        }
        rsSrc.close();
        stmtSrc.close();
    }
    private void putElementIntoStream(byte[] raw) throws IOException
@@ -268,7 +284,7 @@
        byte[] chunk = new byte[optimalSize];
        InputStream is = blob.getBinaryStream(0);
        ByteBuffer buffer = null;    // ByteBuffer.allocate(optimalSize);
        int len = 0;
        int len;
        try
        {
@@ -312,4 +328,9 @@
            fch = null;
        }
    }
    public boolean isSchemaChanged()
    {
        return schemaChanged;
    }
}
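The OracleElementLogger change adds a one-time cache of the DGN seed header: the first call reads the IGDSSEED elements from Oracle, writes them to the log file and stores the raw bytes in dgnFileHeader, and later log files replay the cached bytes instead of re-querying. A simplified sketch of that flow, with the seed table name written out literally and a stubbed putElementIntoStream(), since the real method handles oracle.sql.BLOB columns and writes through a FileChannel:

import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;

// Simplified sketch of the new caching path; BLOB handling and the FileChannel output are omitted.
public class SeedHeaderCacheSketch
{
    private ArrayList<byte[]> dgnFileHeader = null;

    void writeSeedHeader(Connection connection, String currentSchema) throws SQLException, IOException
    {
        if (dgnFileHeader != null)
        {
            // Header already fetched once: replay the cached elements into the new log file.
            for (byte[] raw : dgnFileHeader)
            {
                putElementIntoStream(raw);
            }
            return;
        }

        dgnFileHeader = new ArrayList<byte[]>();
        String fetchSrcStmt = "SELECT IGDSELM FROM \"" + currentSchema + "\".\"IGDSSEED\" ORDER BY ROWID";
        Statement stmtSrc = connection.createStatement();
        ResultSet rsSrc = stmtSrc.executeQuery(fetchSrcStmt);
        while (rsSrc.next())
        {
            byte[] raw = rsSrc.getBytes(1);
            if (raw != null)
            {
                dgnFileHeader.add(raw);   // remember the seed elements for later log files
                putElementIntoStream(raw);
            }
        }
        rsSrc.close();
        stmtSrc.close();
    }

    private void putElementIntoStream(byte[] raw) throws IOException
    {
        // Stub: the real method writes the element bytes into the current log file.
    }
}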
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/mysql/OracleConvertMySQLJobContext.java
@@ -1,43 +1,42 @@
package com.ximple.eofms.jobs.context.mysql;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.net.URL;
import java.net.MalformedURLException;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.transaction.util.LoggerFacade;
import org.apache.commons.transaction.util.CommonsLoggingLogger;
import org.apache.commons.transaction.memory.PessimisticMapWrapper;
import org.apache.commons.digester.Digester;
import org.apache.commons.digester.xmlrules.DigesterLoader;
import org.geotools.data.FeatureWriter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.transaction.memory.PessimisticMapWrapper;
import org.apache.commons.transaction.util.CommonsLoggingLogger;
import org.apache.commons.transaction.util.LoggerFacade;
import org.geotools.data.DataStore;
import org.geotools.data.FeatureWriter;
import org.geotools.data.Transaction;
import org.geotools.data.SchemaNotFoundException;
import org.geotools.data.mysql.MySQLDataStoreFactory;
import org.geotools.feature.Feature;
import org.geotools.feature.FeatureType;
import org.geotools.feature.SimpleFeature;
import org.geotools.feature.IllegalAttributeException;
import org.geotools.feature.SimpleFeature;
import org.quartz.JobExecutionContext;
import org.xml.sax.SAXException;
import com.vividsolutions.jts.util.Assert;
import com.ximple.eofms.jobs.OracleElementLogger;
import com.ximple.eofms.filter.ElementDispatcher;
import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter;
import com.ximple.eofms.filter.ElementDispatcher;
import com.ximple.eofms.jobs.OracleElementLogger;
import com.ximple.io.dgn7.ComplexElement;
import com.ximple.io.dgn7.Element;
import com.ximple.io.dgn7.FrammeAttributeData;
import com.ximple.io.dgn7.ComplexElement;
public class OracleConvertMySQLJobContext extends AbstractOracleToMySQLJobContext
{
@@ -70,7 +69,7 @@
    private JobExecutionContext executionContext;
    private String currentSchema = null;
    private String pgCurrentSchema = null;
    private String mysqlCurrentSchema = null;
    private boolean schemaChanged = false;
    // private String _convertElementIn = null;
@@ -289,6 +288,11 @@
        return elmLogger;
    }
    public boolean isSchemaChanged()
    {
        return schemaChanged;
    }
    public String getCurrentSchema()
    {
        return currentSchema;
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/orasdo/OracleConvertOraSDOJobContext.java
@@ -69,7 +69,7 @@
    private JobExecutionContext executionContext;
    private String currentSchema = null;
    private String pgCurrentSchema = null;
    private String oraCurrentSchema = null;
    private boolean schemaChanged = false;
    // private String _convertElementIn = null;
@@ -229,10 +229,9 @@
                }
                ArrayList features = (ArrayList) featuresContext.get(featureType);
                Iterator itFeature = features.iterator();
                while (itFeature.hasNext())
                for (Object feature1 : features)
                {
                    Feature feature = (Feature) itFeature.next();
                    Feature feature = (Feature) feature1;
                    ((SimpleFeature) writer.next()).setAttributes(feature.getAttributes(null));
                }
                //writer.close();
@@ -288,6 +287,11 @@
        return elmLogger;
    }
    public boolean isSchemaChanged()
    {
        return schemaChanged;
    }
    public String getCurrentSchema()
    {
        return currentSchema;
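Aside from the pgCurrentSchema to oraCurrentSchema rename and the new isSchemaChanged() accessor, the substantive edit in this context replaces an explicit Iterator loop with an enhanced for loop. A minimal self-contained sketch of the before/after shape, with String standing in for the GeoTools Feature type:

import java.util.Iterator;
import java.util.List;

// Before/after shape of the loop rewrite; the raw List mirrors the raw ArrayList in the diff.
public class ForEachRewriteSketch
{
    static void beforeChange(List features)
    {
        Iterator itFeature = features.iterator();
        while (itFeature.hasNext())
        {
            String feature = (String) itFeature.next();
            System.out.println(feature);
        }
    }

    static void afterChange(List features)
    {
        for (Object feature1 : features)
        {
            String feature = (String) feature1;
            System.out.println(feature);
        }
    }
}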
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/AbstractOracleToPostGISJobContext.java
@@ -116,7 +116,7 @@
     */
    private static final int MAX_ALLOWED_VALUE = 10485760;
    protected static final int BATCHSIZE = 256;
    protected static final int BATCHSIZE = 512;
    /**
     * Well Known Text writer (from JTS).
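The only change in the abstract PostGIS context is doubling BATCHSIZE from 256 to 512. How the constant is consumed is not shown in this hunk; the sketch below is a generic illustration of what such a constant usually gates in JDBC code, namely how many rows are buffered with addBatch() before an executeBatch() flush, and is not taken from the project source:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

// Generic batching sketch; whether the PostGIS context flushes exactly this way is an assumption.
public class BatchSizeSketch
{
    protected static final int BATCHSIZE = 512;

    static void insertRows(Connection conn, String insertSql, List<Object[]> rows) throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement(insertSql);
        int pending = 0;
        for (Object[] row : rows)
        {
            for (int i = 0; i < row.length; i++)
            {
                pstmt.setObject(i + 1, row[i]);
            }
            pstmt.addBatch();
            if (++pending >= BATCHSIZE)
            {
                pstmt.executeBatch();   // flush a full batch
                pending = 0;
            }
        }
        if (pending > 0)
        {
            pstmt.executeBatch();       // flush the remainder
        }
        pstmt.close();
    }
}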
xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/context/postgis/OracleConvertPostGISJobContext.java
@@ -5,11 +5,11 @@
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Connection;
import java.sql.Statement;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
@@ -18,28 +18,23 @@
import org.apache.commons.digester.xmlrules.DigesterLoader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.transaction.memory.PessimisticMapWrapper;
import org.apache.commons.transaction.util.CommonsLoggingLogger;
import org.apache.commons.transaction.util.LoggerFacade;
import org.geotools.data.DataStore;
import org.geotools.data.FeatureWriter;
import org.geotools.data.Transaction;
import org.geotools.data.postgis.PostgisDataStoreFactory;
import org.geotools.feature.Feature;
import org.geotools.feature.FeatureType;
import org.geotools.feature.IllegalAttributeException;
import org.geotools.feature.SimpleFeature;
import org.geotools.feature.FeatureTypeBuilder;
import org.geotools.feature.SchemaException;
import org.postgresql.util.PSQLException;
import org.quartz.JobExecutionContext;
import org.xml.sax.SAXException;
import org.postgresql.util.PSQLException;
import com.vividsolutions.jts.util.Assert;
import com.ximple.eofms.filter.AbstractFLinkageDispatchableFilter;
import com.ximple.eofms.filter.ElementDispatcher;
import com.ximple.eofms.filter.CreateFeatureTypeEventListener;
import com.ximple.eofms.filter.ElementDispatcher;
import com.ximple.eofms.filter.FeatureTypeEvent;
import com.ximple.eofms.jobs.OracleElementLogger;
import com.ximple.io.dgn7.ComplexElement;
@@ -210,9 +205,11 @@
    private void updateDataStore()
    {
        Iterator<FeatureType> it = txFeaturesContext.keySet().iterator();
        String currentStmt = null;
        try
        {
            Connection conn = getConnection();
            boolean autoCommit = conn.getAutoCommit();
            conn.setAutoCommit(true);
            while (it.hasNext())
            {
                FeatureType featureType = it.next();
@@ -220,9 +217,6 @@
                String bindingStmt = makePrepareInsertSql(featureType);
                ArrayList<Feature> features = txFeaturesContext.get(featureType);
                Connection conn = getConnection();
                boolean autoCommit = conn.getAutoCommit();
                conn.setAutoCommit(true);
                PreparedStatement pstmt = conn.prepareStatement(bindingStmt);
                for (Feature feature : features)
@@ -257,9 +251,9 @@
                pstmt.close();
                features.clear();
                conn.setAutoCommit(autoCommit);
                logger.debug("End Save into PostGIS:" + featureType.getTypeName());
            }
            conn.setAutoCommit(autoCommit);
            accumulate = 0;
        } catch (SQLException e)
        {
@@ -354,7 +348,8 @@
                        stmt.execute(stmtText);
                        stmt.close();
                    }
                } else {
                } else
                {
                    deleteTable(conn, featureName);
                }
                conn.close();
@@ -387,4 +382,9 @@
            }
        }
    }
    public boolean isSchemaChanged()
    {
        return schemaChanged;
    }
}
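The updateDataStore() hunks move the connection lookup and the autoCommit toggling out of the per-FeatureType loop, so they happen once per flush and the original autoCommit flag is restored only after every feature type has been written. A generic, self-contained sketch of that shape, with a plain table-name/row model standing in for the GeoTools FeatureType and Feature objects used by the real method:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;

// Generic sketch of the restructuring: acquire the connection and toggle autoCommit once,
// outside the per-table loop, instead of once per table. The table/row model is invented.
public class BatchFlushSketch
{
    static void flush(Connection conn, Map<String, List<Object[]>> pendingRows,
                      Map<String, String> insertSqlByTable) throws SQLException
    {
        boolean autoCommit = conn.getAutoCommit();   // remembered once
        conn.setAutoCommit(true);
        try
        {
            for (Map.Entry<String, List<Object[]>> entry : pendingRows.entrySet())
            {
                PreparedStatement pstmt = conn.prepareStatement(insertSqlByTable.get(entry.getKey()));
                for (Object[] row : entry.getValue())
                {
                    for (int i = 0; i < row.length; i++)
                    {
                        pstmt.setObject(i + 1, row[i]);
                    }
                    pstmt.executeUpdate();
                }
                pstmt.close();
                entry.getValue().clear();
            }
        }
        finally
        {
            conn.setAutoCommit(autoCommit);          // restored once, after every table is written
        }
    }
}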