From af57baed0d81ffaf7ae6b889dd26d9647a8862f0 Mon Sep 17 00:00:00 2001
From: Ulysses Kao <ulysseskao@ximple.com.tw>
Date: Fri, 22 Aug 2008 19:26:24 +0800
Subject: [PATCH] EOFM-152: resolve the target PostGIS schema from an xgversion table
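
Pass an explicit target schema name through the conversion steps instead of always
using the configured _pgSchema. The schema is resolved once per run by
determineTargetSchemaName(), which creates and seeds an xgversion bookkeeping table
in the target PostGIS database when it is missing.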

---
 xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2PostGISJob.java |  107 +++++++++++++++++++++++++++++++++++++++++++----------
 1 files changed, 87 insertions(+), 20 deletions(-)

diff --git a/xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2PostGISJob.java b/xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2PostGISJob.java
index 65d298c..003668e 100644
--- a/xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2PostGISJob.java
+++ b/xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/OracleConvertDgn2PostGISJob.java
@@ -24,6 +24,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.geotools.data.DataStore;
+import org.geotools.data.Transaction;
 import org.geotools.data.postgis.PostgisDataStore;
 import org.geotools.data.postgis.PostgisDataStoreFactory;
 import org.geotools.feature.IllegalAttributeException;
@@ -62,6 +63,7 @@
 {
     final static Log logger = LogFactory.getLog(OracleConvertDgn2PostGISJob.class);
 
+    private static final String XGVERSION_NAME = "xgversion";
     private static final String PGHOST = "PGHOST";
     private static final String PGDDATBASE = "PGDDATBASE";
     private static final String PGPORT = "PGPORT";
@@ -105,16 +107,9 @@
         return logger;
     }
 
-    protected AbstractOracleJobContext prepareJobContext(String filterPath)
+    protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath)
     {
-        /*
-        if (oracleJobContext == null)
-        {
-            oracleJobContext = new OracleConvertPostGISJobContext(getDataPath(), getTargetDataStore(), filterPath);
-        }
-        return oracleJobContext;
-        */
-        return new OracleConvertPostGISJobContext(getDataPath(), getTargetDataStore(), _pgSchema, filterPath);
+        return new OracleConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, filterPath);
     }
 
     protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException
@@ -209,22 +204,26 @@
         }
 
         long t1 = System.currentTimeMillis();
+        String targetSchemaName = null;
         try
         {
             logger.info("-- step:clearOutputDatabase --");
             clearOutputDatabase();
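+            // Resolve the PostGIS schema this run writes into before any conversion step runs.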
+            targetSchemaName = determineTargetSchemaName();
+
             if (checkConvertDB())
             {
                 logger.info("-- step:convertOracleDB --");
 
                 OracleConvertPostGISJobContext jobContext =
-                        (OracleConvertPostGISJobContext) prepareJobContext(_filterPath);
+                        (OracleConvertPostGISJobContext) prepareJobContext(targetSchemaName, _filterPath);
                 jobContext.setSourceDataStore(getSourceDataStore());
                 // jobContext.setConvertElementIn(_convertElementIn);
                 jobContext.setElementLogging(checkElementLogging());
                 jobContext.setExecutionContext(context);
                 if (isCopyConnectivityMode())
                     copyConnectivity(jobContext);
+
 
                 for (String orgSchema : _orgSchema)
                 {
@@ -239,15 +238,15 @@
             if (checkConvertFile())
             {
                 logger.info("-- step:convertIndexDesignFile --");
-                convertIndexDesignFile(context);
+                convertIndexDesignFile(context, targetSchemaName);
                 logger.info("-- step:convertOtherDesignFile --");
-                convertOtherDesignFile(context);
+                convertOtherDesignFile(context, targetSchemaName);
             }
 
             if (checkConvertElementIn())
             {
                 logger.info("-- step:convertFeatureDesignFile --");
-                convertFeatureDesignFile(context);
+                convertFeatureDesignFile(context, targetSchemaName);
             }
 
             if (checkCreateDummy())
@@ -330,7 +329,7 @@
     }
 
     private void exetcuteConvert(OracleConvertPostGISJobContext jobContext,
-                                 String querySchema, String dataPath) throws SQLException
+                                 String querySchema, String targetSchemaName) throws SQLException
     {
         int order = 0;
         OrderedMap map = getBlobStorageList(jobContext.getOracleConnection(),
@@ -638,7 +637,7 @@
      * @throws org.quartz.JobExecutionException
      *          exception
      */
-    private void convertIndexDesignFile(JobExecutionContext context) throws JobExecutionException
+    private void convertIndexDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException
     {
         File indexDir = new File(getDataPath(), "index");
         if (!indexDir.exists())
@@ -663,7 +662,7 @@
         for (File dgnFile : dgnFiles)
         {
             IndexDgnConvertPostGISJobContext convertContext =
-                    new IndexDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), _pgSchema);
+                    new IndexDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName);
             logger.debug("--- start dgnfile-" + dgnFile.toString() + " ---");
             try
             {
@@ -778,7 +777,7 @@
      * @throws org.quartz.JobExecutionException
      *          exception
      */
-    private void convertOtherDesignFile(JobExecutionContext context) throws JobExecutionException
+    private void convertOtherDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException
     {
         File otherDir = new File(getDataPath(), "other");
         if (!otherDir.exists())
@@ -803,7 +802,7 @@
         for (File dgnFile : dgnFiles)
         {
             GeneralDgnConvertPostGISJobContext convertContext =
-                    new GeneralDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), _pgSchema);
+                    new GeneralDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName);
             logger.info("--- start dgnfile-" + dgnFile.toString() + " ---");
             try
             {
@@ -961,7 +960,7 @@
         }
     }
 
-    private void convertFeatureDesignFile(JobExecutionContext context) throws JobExecutionException
+    private void convertFeatureDesignFile(JobExecutionContext context, String targetSchemaName) throws JobExecutionException
     {
         File elminDir = new File(getDataPath(), "elmin");
         if (!elminDir.exists())
@@ -986,7 +985,7 @@
         for (File dgnFile : dgnFiles)
         {
             FeatureDgnConvertPostGISJobContext convertContext =
-                    new FeatureDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), _pgSchema, _filterPath);
+                    new FeatureDgnConvertPostGISJobContext(getDataPath(), getTargetDataStore(), targetSchemaName, _filterPath);
             logger.info("--- start dgnfile-" + dgnFile.toString() + " ---");
             try
             {
@@ -1155,4 +1154,72 @@
         }
     }
 
+    /**
+     * Resolves the PostGIS schema this conversion run should write into. Ensures the
+     * xgversion bookkeeping table exists, creating and seeding it when missing, and
+     * then reads the schema name back from that table.
+     */
+    private String determineTargetSchemaName() throws IOException
+    {
+        if (targetDataStore == null) return null;
+        Connection connection = null;
+        Statement stmt = null;
+        ResultSet rs = null;
+        String targetSchema = null;
+        try
+        {
+            connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT);
+            rs = connection.getMetaData().getTables(null, _pgSchema, XGVERSION_NAME, new String[] {"TABLE"});
+            if (!rs.next())
+                createXGeosVersionTable(connection, _pgSchema);
+            rs.close();
+
+            // NOTE: the selection policy below is an assumption: take the first schema
+            // registered in xgversion and fall back to _pgSchema when the table is empty.
+            stmt = connection.createStatement();
+            rs = stmt.executeQuery("SELECT vsschema FROM " +
+                    encodeSchemaTableName(_pgSchema, XGVERSION_NAME) + " ORDER BY vsid");
+            targetSchema = rs.next() ? rs.getString(1) : _pgSchema;
+        } catch (SQLException e) {
+            logger.warn(e.getMessage(), e);
+        } finally {
+            if (rs != null) try { rs.close(); } catch (SQLException e) { }
+            if (stmt != null) try { stmt.close(); } catch (SQLException e) { }
+            if (connection != null) try { connection.close(); } catch (SQLException e) { }
+        }
+        return targetSchema;
+    }
+
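+    // Builds a schema-qualified table name, double-quoting both identifiers so
+    // PostgreSQL preserves their case.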
+    public String encodeSchemaTableName(String schemaName, String tableName)
+    {
+        return "\"" + schemaName + "\".\"" + tableName + "\"";
+    }
+
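+    /**
+     * Creates the xgversion bookkeeping table in the given schema, assigns its
+     * ownership to spatialdb, grants access to public, and seeds it with the
+     * gisrepo1 and gisrepo2 schema rows.
+     */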
+    private void createXGeosVersionTable(Connection connection, String pgSchema) throws SQLException
+    {
+        Statement stmt = null;
+        StringBuilder sql = new StringBuilder("CREATE TABLE ");
+        sql.append(encodeSchemaTableName(pgSchema, XGVERSION_NAME));
+        sql.append(" ( vsid serial PRIMARY KEY, ");
+        sql.append(" vsschema character varying(64) NOT NULL, ");
+        sql.append(" vsstatus smallint NOT NULL, ");
+        sql.append(" vstimestamp timestamp with time zone ) ");
+        try
+        {
+            stmt = connection.createStatement();
+            stmt.executeUpdate(sql.toString());
+
+            sql = new StringBuilder("ALTER TABLE ");
+            sql.append(encodeSchemaTableName(pgSchema, XGVERSION_NAME));
+            sql.append(" OWNER TO spatialdb");
+            stmt.executeUpdate(sql.toString());
+
+            sql = new StringBuilder("GRANT ALL ON TABLE ");
+            sql.append(encodeSchemaTableName(pgSchema, XGVERSION_NAME));
+            sql.append(" TO public");
+            stmt.executeUpdate(sql.toString());
+
+            sql = new StringBuilder("INSERT INTO ");
+            sql.append(encodeSchemaTableName(pgSchema, XGVERSION_NAME));
+            sql.append(" (vsschema, vsstatus) VALUES (");
+            sql.append("'gisrepo1', 0)");
+            stmt.executeUpdate(sql.toString());
+
+            sql = new StringBuilder("INSERT INTO ");
+            sql.append(encodeSchemaTableName(pgSchema, XGVERSION_NAME));
+            sql.append(" (vsschema, vsstatus) VALUES (");
+            sql.append("'gisrepo2', 0)");
+            stmt.executeUpdate(sql.toString());
+        } finally {
+            if (stmt != null) stmt.close();
+        }
+    }
 }

--
Gitblit v0.0.0-SNAPSHOT