File was renamed from xdgnjobs/ximple-spatialjob/src/main/java/com/ximple/eofms/jobs/GeoserverResetConfigJob.java

import com.ximple.eofms.geoserver.config.XGeosDataConfig;
import com.ximple.eofms.geoserver.config.XGeosDataConfigMapping;
import com.ximple.eofms.jobs.context.AbstractOracleJobContext;
import com.ximple.eofms.util.PrintfFormat;
import it.geosolutions.geoserver.rest.GeoServerRESTManager;
import it.geosolutions.geoserver.rest.GeoServerRESTPublisher;
import it.geosolutions.geoserver.rest.GeoServerRESTReader;
import it.geosolutions.geoserver.rest.decoder.RESTDataStore;
import it.geosolutions.geoserver.rest.encoder.datastore.GSPostGISDatastoreEncoder;
import it.geosolutions.geoserver.rest.manager.GeoServerRESTStoreManager;
import org.apache.commons.collections.MultiMap;
import org.apache.commons.digester3.Digester;
import org.apache.commons.digester3.binder.DigesterLoader;
import org.apache.commons.digester3.xmlrules.FromXmlRulesModule;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.geotools.data.DataStore;
import org.geotools.data.Transaction;
import org.geotools.data.edbgeo.PostgisDataStore;
import org.geotools.data.postgis.PostGISDialect;
import org.geotools.jdbc.JDBCDataStore;
import org.geotools.jdbc.SQLDialect;
import org.quartz.Job;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.xml.sax.SAXException;

import javax.naming.ConfigurationException;
import javax.sql.DataSource;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
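
/**
 * Quartz job that runs after the DGN-to-PostGIS conversion performed by the parent
 * OracleConvertDgn2PostGISJob: it re-links the PostGIS data views for the newly converted
 * schema and then registers the workspace and PostGIS datastore on the target GeoServer
 * instance through its REST API.
 */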
public class GeoserverIntegrateConfigJob extends OracleConvertDgn2PostGISJob {
    final static Log logger = LogFactory.getLog(GeoserverIntegrateConfigJob.class);

    private static final String SKIPCONFIGJOB = "SKIPCONFIGJOB";
    private static final String MASTERMODE = "MASTERMODE";
    private static final String EPSG = "EPSG:";
    private static final String DEFAULTNAMESPACE = "xtpc";
    private static final String XGEOSDATACONFIG_PATH = "xgeosdataconfig.xml";
    private static final String XGEOSRULES_NAME = "DefaultXGeosDataConfigRules.xml";
    private static final String GEOSERVER_BASEURL = "GEOSERVER_URL";
    private static final String GEOSERVER_USER = "GEOSERVER_USER";
    private static final String GEOSERVER_PASS = "GEOSERVER_PASS";

    // private static final int MAGIC_BLOCKSIZE = (64 * 1024 * 1024) - (32 * 1024);

    private static XGeosDataConfigMapping xgeosDataConfigMapping = null;

    protected String _geoServerURL;
    protected String _geoServerUser;
    protected String _geoServerPass;

    private long queryTime = 0;
    private long queryTimeStart = 0;

    public Log getLogger() {
        return logger;
    }

    protected AbstractOracleJobContext prepareJobContext(String targetSchemaName, String filterPath,
                                                         boolean profileMode,
                                                         boolean useTransform) {
        return super.prepareJobContext(targetSchemaName, filterPath, profileMode, useTransform);
    }

    protected void extractJobConfiguration(JobDetail jobDetail) throws JobExecutionException {
        super.extractJobConfiguration(jobDetail);

        JobDataMap dataMap = jobDetail.getJobDataMap();
        _geoServerURL = dataMap.getString(GEOSERVER_BASEURL);
        _geoServerUser = dataMap.getString(GEOSERVER_USER);
        _geoServerPass = dataMap.getString(GEOSERVER_PASS);

        if (_geoServerURL == null) {
            logger.warn("GEOSERVER_URL is null");
            throw new JobExecutionException("Unknown GEOSERVER_URL.");
        }
        if (_geoServerUser == null) {
            logger.warn("GEOSERVER_USER is null");
            throw new JobExecutionException("Unknown GEOSERVER_USER.");
        }
        if (_geoServerPass == null) {
            logger.warn("GEOSERVER_PASS is null");
            throw new JobExecutionException("Unknown GEOSERVER_PASS.");
        }
    }
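
    // The three GEOSERVER_* keys read above come from the job's JobDataMap. A minimal sketch of
    // the expected entries (values are illustrative only, not taken from this project's
    // scheduler configuration):
    //   GEOSERVER_URL  = http://localhost:8080/geoserver
    //   GEOSERVER_USER = admin
    //   GEOSERVER_PASS = geoserver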
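    /**
     * Lazily builds the XGeosDataConfig mapping by applying the Digester rules in
     * DefaultXGeosDataConfigRules.xml to the xgeosdataconfig.xml resource on the classpath.
     */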
    protected XGeosDataConfigMapping getConfigMapping() {
        if (xgeosDataConfigMapping == null) {
            final URL rulesURL = XGeosDataConfigMapping.class.getResource(XGEOSRULES_NAME);
            assert rulesURL != null;
            /*
            FromXmlRulesModule rules = new FromXmlRulesModule() {
                @Override
                protected void loadRules() {
                    this.loadXMLRules(rulesURL);
                }
            };

            Digester digester = DigesterLoader.newLoader(rules).newDigester();
            */
            DigesterLoader loader = DigesterLoader.newLoader(new FromXmlRulesModule() {
                @Override
                protected void loadRules() {
                    loadXMLRules(rulesURL);
                }
            });
            Digester digester = loader.newDigester();

            // Legacy approach (removed): xgeosdataconfig.xml used to be read from the "xdgnjobs"
            // folder under the GeoServer data directory; it is now loaded from the classpath.
            final URL configDataURL = XGeosDataConfigMapping.class.getResource(XGEOSDATACONFIG_PATH);
            try {
                xgeosDataConfigMapping = (XGeosDataConfigMapping) digester.parse(configDataURL);
            } catch (IOException e) {
                logger.warn(e.getMessage(), e);
            } catch (SAXException e) {
                logger.warn(e.getMessage(), e);
            }
        }
        return xgeosDataConfigMapping;
    }

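    /**
     * Runs the inherited Oracle-to-PostGIS conversion first (via super.execute), then re-links
     * the PostGIS data views and refreshes the GeoServer workspace/datastore configuration.
     */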
    @Override
    public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {

        super.execute(jobExecutionContext);

        createTargetDataStore();
        if (getSourceDataStore() == null) {
            logger.warn("Cannot connect to the source Oracle database.");
            throw new JobExecutionException("Cannot connect to the source Oracle database.");
        }

        if (getTargetDataStore() == null) {
            logger.warn("Cannot connect to the target PostgreSQL database.");
            throw new JobExecutionException("Cannot connect to the target PostgreSQL database.");
        }

        if (isProfileMode()) {
            queryTime = 0;
        }

        long t1 = System.currentTimeMillis();

        try {
            resetPostgisViewMapping(jobExecutionContext);
            resetGeoServerConfig(jobExecutionContext);
        } finally {
            disconnect();
        }
    }

    /**
     * Rebuilds all of the data views in the PostGIS target database for the schema that is
     * currently in the VSSTATUS_READY state, then advances that schema to VSSTATUS_LINKVIEW.
     *
     * @param executionContext the context of the batch execution
     */
    private void resetPostgisViewMapping(JobExecutionContext executionContext) {
        assert executionContext != null;
        Connection connection = null;
        try {
            connection = targetDataStore.getConnection(Transaction.AUTO_COMMIT);
            String ownerName = _pgUsername;
            String currentTargetSchema = retrieveCurrentSchemaName(connection,
                DataReposVersionManager.VSSTATUS_READY);
            if (currentTargetSchema == null) {
                logger.info("Cannot find a schema whose status is VSSTATUS_READY[" +
                    DataReposVersionManager.VSSTATUS_READY + "]");
                return;
            }

            ArrayList<String> realTableNames = new ArrayList<String>();
            retrieveAllRealTableName(connection, currentTargetSchema, realTableNames);

            // Collect the existing public-schema view definitions, then recreate them against
            // the tables of the current target schema.
            HashMap<String, String> viewDefs = retrieveViewDef(connection, "public", "fsc%");
            HashMap<String, String> tempViewDefs = retrieveViewDef(connection, "public", "indexshape%");
            viewDefs.putAll(tempViewDefs);
            tempViewDefs = retrieveViewDef(connection, "public", "lndtpc%");
            viewDefs.putAll(tempViewDefs);

            for (String tableName : realTableNames) {
                resetPostgisDataView(connection, viewDefs, ownerName, currentTargetSchema, tableName);
            }

            resetExtraPostgisDataView(connection, ownerName, currentTargetSchema, realTableNames);

            updateCurrentRepositoryStatus(connection, currentTargetSchema,
                DataReposVersionManager.VSSTATUS_LINKVIEW);

            // String[] featureNames = dataStore.getTypeNames();
            // logger.info("featureNames[] size = " + featureNames.length);
        } catch (IOException e) {
            logger.warn(e.getMessage(), e);
        } catch (SQLException e) {
            logger.warn(e.getMessage(), e);
        } finally {
            if (connection != null) {
                try {
                    connection.close();
                } catch (SQLException e) {
                    logger.warn(e.getMessage(), e);
                }
            }
        }
    }
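    /**
     * Ensures that the GeoServer instance referenced by GEOSERVER_URL has the default workspace
     * (DEFAULTNAMESPACE) and a "pgDMMS" PostGIS datastore pointing at the target database,
     * creating them through the REST API when they are missing.
     */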
    private void resetGeoServerConfig(JobExecutionContext jobExecutionContext) {
        try {
            URL geoServerURL = new URL(_geoServerURL);
            GeoServerRESTManager manager = new GeoServerRESTManager(geoServerURL, _geoServerUser, _geoServerPass);
            GeoServerRESTReader reader = manager.getReader();
            List<String> workSpaces = reader.getWorkspaceNames();
            boolean found = false;
            for (String name : workSpaces) {
                if (name.equalsIgnoreCase(DEFAULTNAMESPACE)) {
                    found = true;
                    break;
                }
            }

            if (!found) {
                GeoServerRESTPublisher publisher = manager.getPublisher();
                publisher.createWorkspace(DEFAULTNAMESPACE, new URI("http://tpc.ximple.com.tw/geodmms"));
            }

            RESTDataStore dataStore = reader.getDatastore(DEFAULTNAMESPACE, "pgDMMS");
            if (dataStore == null) {
                GeoServerRESTStoreManager storeManager = manager.getStoreManager();
                GSPostGISDatastoreEncoder store = new GSPostGISDatastoreEncoder("pgDMMS");
                store.setHost(_pgHost);
                // store.setPort(_pgPort);
                store.setDatabase(_pgDatabase);
                store.setSchema("public");
                store.setUser(_pgUsername);
                store.setPassword(_pgPassword);
                storeManager.create(DEFAULTNAMESPACE, store);
            }

            XGeosDataConfigMapping configMapping = getConfigMapping();
            MultiMap configMultiMap = configMapping.getMapping();
            for (Object key : configMultiMap.keySet()) {
                List values = (List) configMultiMap.get(key);
                for (Object value : values) {
                    XGeosDataConfig xgeosConfig = (XGeosDataConfig) value;
                    // Placeholder: publishing the feature types described by each XGeosDataConfig
                    // entry is not implemented here yet.
                }
            }
        } catch (MalformedURLException e) {
            logger.warn(e.getMessage(), e);
        } catch (URISyntaxException e) {
            logger.warn(e.getMessage(), e);
        }
    }
}