author     rescobar    2010-07-15 20:05:38 +0000
committer  rescobar    2010-07-15 20:05:38 +0000
commit     d5eec996d6737b06074f8779ee0195b76f3043a3 (patch)
tree       21a2a70590b1b107ef7906a02325e35805b144e4 /plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee
parent     767f4a24bea71c91b335b956a597689380395f92 (diff)
Converted Sessions to use core model datastore pattern
Fixed Service Registration to use dependency tracker. Fixed Servlet Registration to use dependency tracker.
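
The bulk of the diff below is a mechanical refactoring: operations that previously held an IOseeDatabaseServiceProvider or IOseeBranchServiceProvider and dereferenced it at call time (getOseeDatabaseService(), getBranchService()) now receive the resolved IOseeDatabaseService / IOseeBranchService directly in their constructors. A minimal before/after sketch of that pattern, using illustrative interface names rather than the real OSEE types:

// Hypothetical stand-ins for IOseeDatabaseService and its *ServiceProvider
// wrapper; the real OSEE interfaces carry a much richer API.
interface DatabaseService {
   boolean isProduction();
}

interface DatabaseServiceProvider {
   DatabaseService getDatabaseService();
}

// Before: the operation depends on the provider and dereferences it on use.
class InitOperationWithProvider {
   private final DatabaseServiceProvider provider;

   InitOperationWithProvider(DatabaseServiceProvider provider) {
      this.provider = provider;
   }

   void run() {
      if (provider.getDatabaseService().isProduction()) {
         throw new IllegalStateException("refusing to initialize a production datastore");
      }
   }
}

// After: the resolved service is injected directly, dropping one level of indirection.
class InitOperationWithService {
   private final DatabaseService dbService;

   InitOperationWithService(DatabaseService dbService) {
      this.dbService = dbService;
   }

   void run() {
      if (dbService.isProduction()) {
         throw new IllegalStateException("refusing to initialize a production datastore");
      }
   }
}

The same constructor change recurs in each file listed in the diffstat below.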
Diffstat (limited to 'plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee')
-rw-r--r--  plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/DatastoreInitOperation.java | 90
-rw-r--r--  plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/BackupTableDataOperation.java | 130
-rw-r--r--  plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/CreateSchemaOperation.java | 44
-rw-r--r--  plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/DatabaseDataExtractor.java | 395
-rw-r--r--  plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/ExtractDatabaseSchemaOperation.java | 874
-rw-r--r--  plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/ImportDataFromDbServiceOperation.java | 308
-rw-r--r--  plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/OperationFactory.java | 29
7 files changed, 938 insertions, 932 deletions
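
The commit message also mentions moving service and servlet registration onto a dependency tracker. Those changes live outside this plugin (the diffstat above is limited to org.eclipse.osee.framework.core.datastore), but in OSGi terms such a tracker is usually an org.osgi.util.tracker.ServiceTracker that registers a servlet only once the HttpService it depends on appears and unregisters it when that service goes away. The following is a hedged sketch of that idea, not the OSEE implementation; the alias and the empty servlet are made up:

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;

import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.osgi.service.http.HttpService;
import org.osgi.service.http.NamespaceException;
import org.osgi.util.tracker.ServiceTracker;

public class SessionServletTracker extends ServiceTracker {
   private static final String ALIAS = "/osee/session"; // illustrative alias only

   public SessionServletTracker(BundleContext context) {
      super(context, HttpService.class.getName(), null);
   }

   @Override
   public Object addingService(ServiceReference reference) {
      // The HttpService dependency just became available: register the servlet now.
      HttpService httpService = (HttpService) context.getService(reference);
      try {
         httpService.registerServlet(ALIAS, new HttpServlet() {
            private static final long serialVersionUID = 1L;
         }, null, null);
      } catch (ServletException ex) {
         throw new IllegalStateException(ex);
      } catch (NamespaceException ex) {
         throw new IllegalStateException(ex);
      }
      return httpService;
   }

   @Override
   public void removedService(ServiceReference reference, Object service) {
      // The HttpService is going away: unregister the servlet before releasing it.
      ((HttpService) service).unregister(ALIAS);
      context.ungetService(reference);
   }
}

A bundle activator would typically construct this tracker in start(BundleContext) and call open(), then call close() in stop(), so registration always follows the availability of the tracked service.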
diff --git a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/DatastoreInitOperation.java b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/DatastoreInitOperation.java
index e3392dbe419..28c12d9bbb6 100644
--- a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/DatastoreInitOperation.java
+++ b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/DatastoreInitOperation.java
@@ -13,7 +13,7 @@ package org.eclipse.osee.framework.core.datastore;
import java.io.File;
import java.util.logging.Level;
import org.eclipse.core.runtime.IProgressMonitor;
-import org.eclipse.osee.framework.branch.management.IOseeBranchServiceProvider;
+import org.eclipse.osee.framework.branch.management.IOseeBranchService;
import org.eclipse.osee.framework.core.datastore.internal.Activator;
import org.eclipse.osee.framework.core.datastore.schema.operations.OperationFactory;
import org.eclipse.osee.framework.core.enums.PermissionEnum;
@@ -24,7 +24,6 @@ import org.eclipse.osee.framework.core.server.IApplicationServerManager;
import org.eclipse.osee.framework.core.server.OseeServerProperties;
import org.eclipse.osee.framework.core.util.Conditions;
import org.eclipse.osee.framework.database.IOseeDatabaseService;
-import org.eclipse.osee.framework.database.IOseeDatabaseServiceProvider;
import org.eclipse.osee.framework.database.core.OseeInfo;
import org.eclipse.osee.framework.jdk.core.util.GUID;
import org.eclipse.osee.framework.jdk.core.util.Lib;
@@ -35,59 +34,58 @@ import org.eclipse.osee.framework.logging.OseeLog;
*/
public class DatastoreInitOperation extends AbstractOperation {
- private static final String ADD_PERMISSION =
- "INSERT INTO OSEE_PERMISSION (PERMISSION_ID, PERMISSION_NAME) VALUES (?,?)";
+ private static final String ADD_PERMISSION =
+ "INSERT INTO OSEE_PERMISSION (PERMISSION_ID, PERMISSION_NAME) VALUES (?,?)";
- private final IOseeBranchServiceProvider branchServiceProvider;
- private final IOseeDatabaseServiceProvider dbService;
- private final IOseeSchemaProvider schemaProvider;
- private final SchemaCreationOptions options;
- private final IApplicationServerManager appServerManager;
+ private final IOseeBranchService branchService;
+ private final IOseeDatabaseService dbService;
+ private final IOseeSchemaProvider schemaProvider;
+ private final SchemaCreationOptions options;
+ private final IApplicationServerManager appServerManager;
- public DatastoreInitOperation(IApplicationServerManager appServerManager, IOseeDatabaseServiceProvider dbService, IOseeBranchServiceProvider branchServiceProvider, IOseeSchemaProvider schemaProvider, SchemaCreationOptions options) {
- super("Datastore Initialization", Activator.PLUGIN_ID);
- this.appServerManager= appServerManager;
- this.dbService = dbService;
- this.branchServiceProvider = branchServiceProvider;
- this.schemaProvider = schemaProvider;
- this.options = options;
- }
+ public DatastoreInitOperation(IApplicationServerManager appServerManager, IOseeDatabaseService dbService, IOseeBranchService branchService, IOseeSchemaProvider schemaProvider, SchemaCreationOptions options) {
+ super("Datastore Initialization", Activator.PLUGIN_ID);
+ this.appServerManager = appServerManager;
+ this.dbService = dbService;
+ this.branchService = branchService;
+ this.schemaProvider = schemaProvider;
+ this.options = options;
+ }
- @Override
- protected void doWork(IProgressMonitor monitor) throws Exception {
- Conditions.checkExpressionFailOnTrue(dbService.getOseeDatabaseService().isProduction(),
- "Error - attempting to initialize a production datastore.");
+ @Override
+ protected void doWork(IProgressMonitor monitor) throws Exception {
+ Conditions.checkExpressionFailOnTrue(dbService.isProduction(),
+ "Error - attempting to initialize a production datastore.");
- IOperation subOp = OperationFactory.createDbSchema(dbService, schemaProvider, options);
- doSubWork(subOp, monitor, 0.30);
+ IOperation subOp = OperationFactory.createDbSchema(dbService, schemaProvider, options);
+ doSubWork(subOp, monitor, 0.30);
- dbService.getOseeDatabaseService().getSequence().clear();
+ dbService.getSequence().clear();
- appServerManager.executeLookupRegistration();
+ appServerManager.executeLookupRegistration();
- deleteBinaryBackingData();
- String binaryDataPath = OseeServerProperties.getOseeApplicationServerData();
- Lib.deleteDir(new File(binaryDataPath + File.separator + "attr"));
+ deleteBinaryBackingData();
+ String binaryDataPath = OseeServerProperties.getOseeApplicationServerData();
+ Lib.deleteDir(new File(binaryDataPath + File.separator + "attr"));
- OseeInfo.putValue(OseeInfo.DB_ID_KEY, GUID.create());
- addDefaultPermissions();
+ OseeInfo.putValue(OseeInfo.DB_ID_KEY, GUID.create());
+ addDefaultPermissions();
- subOp = branchServiceProvider.getBranchService().createSystemRootBranch(monitor);
- doSubWork(subOp, monitor, 0.30);
- }
+ subOp = branchService.createSystemRootBranch(monitor);
+ doSubWork(subOp, monitor, 0.30);
+ }
- @SuppressWarnings("unchecked")
- private void addDefaultPermissions() throws OseeDataStoreException {
- IOseeDatabaseService service = dbService.getOseeDatabaseService();
- for (PermissionEnum permission : PermissionEnum.values()) {
- service.runPreparedUpdate(ADD_PERMISSION, permission.getPermId(), permission.getName());
- }
- }
+ @SuppressWarnings("unchecked")
+ private void addDefaultPermissions() throws OseeDataStoreException {
+ for (PermissionEnum permission : PermissionEnum.values()) {
+ dbService.runPreparedUpdate(ADD_PERMISSION, permission.getPermId(), permission.getName());
+ }
+ }
- private static void deleteBinaryBackingData() {
- String binaryDataPath = OseeServerProperties.getOseeApplicationServerData();
- OseeLog.log(Activator.class, Level.INFO, String.format("Deleting application server binary data [%s]...",
- binaryDataPath));
- Lib.deleteDir(new File(binaryDataPath + File.separator + "attr"));
- }
+ private static void deleteBinaryBackingData() {
+ String binaryDataPath = OseeServerProperties.getOseeApplicationServerData();
+ OseeLog.log(Activator.class, Level.INFO,
+ String.format("Deleting application server binary data [%s]...", binaryDataPath));
+ Lib.deleteDir(new File(binaryDataPath + File.separator + "attr"));
+ }
}
diff --git a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/BackupTableDataOperation.java b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/BackupTableDataOperation.java
index a9869af0c62..d90e8e7aa2a 100644
--- a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/BackupTableDataOperation.java
+++ b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/BackupTableDataOperation.java
@@ -19,77 +19,77 @@ import org.eclipse.osee.framework.core.datastore.internal.Activator;
import org.eclipse.osee.framework.core.datastore.schema.data.SchemaData;
import org.eclipse.osee.framework.core.datastore.schema.data.TableElement;
import org.eclipse.osee.framework.core.operation.AbstractOperation;
-import org.eclipse.osee.framework.database.IOseeDatabaseServiceProvider;
+import org.eclipse.osee.framework.database.IOseeDatabaseService;
public class BackupTableDataOperation extends AbstractOperation {
- private final File backupDirectory;
- private final Set<String> schemas;
- private final Map<String, SchemaData> userSpecifiedConfig;
- private final Map<String, SchemaData> currentDatabaseConfig;
- private final IOseeDatabaseServiceProvider provider;
+ private final File backupDirectory;
+ private final Set<String> schemas;
+ private final Map<String, SchemaData> userSpecifiedConfig;
+ private final Map<String, SchemaData> currentDatabaseConfig;
+ private final IOseeDatabaseService databaseService;
- public BackupTableDataOperation(IOseeDatabaseServiceProvider provider, String backupDirPath, Set<String> schemas, Map<String, SchemaData> userSpecifiedConfig, Map<String, SchemaData> currentDatabaseConfig) {
- super("Backup Table Data", Activator.PLUGIN_ID);
- this.provider = provider;
- this.schemas = schemas;
- this.userSpecifiedConfig = userSpecifiedConfig;
- this.currentDatabaseConfig = currentDatabaseConfig;
- this.backupDirectory = new File("BackupDirectory");
- }
+ public BackupTableDataOperation(IOseeDatabaseService databaseService, String backupDirPath, Set<String> schemas, Map<String, SchemaData> userSpecifiedConfig, Map<String, SchemaData> currentDatabaseConfig) {
+ super("Backup Table Data", Activator.PLUGIN_ID);
+ this.databaseService = databaseService;
+ this.schemas = schemas;
+ this.userSpecifiedConfig = userSpecifiedConfig;
+ this.currentDatabaseConfig = currentDatabaseConfig;
+ this.backupDirectory = new File("BackupDirectory");
+ }
- @Override
- protected void doWork(IProgressMonitor monitor) throws Exception {
- Set<String> dataToBackup = getTablesToBackup();
- if (dataToBackup.size() > 0) {
- System.out.println(dataToBackup.toString().replaceAll(", ", "\n"));
- clearBackupDirectory();
- DatabaseDataExtractor dbDataExtractor = new DatabaseDataExtractor(provider, schemas, backupDirectory);
- Set<String> backupTables = dataToBackup;
- for (String backupTable : backupTables) {
- dbDataExtractor.addTableNameToExtract(backupTable);
- }
- doSubWork(dbDataExtractor, monitor, 0.90);
- dbDataExtractor.waitForWorkerThreads();
- }
- }
+ @Override
+ protected void doWork(IProgressMonitor monitor) throws Exception {
+ Set<String> dataToBackup = getTablesToBackup();
+ if (dataToBackup.size() > 0) {
+ System.out.println(dataToBackup.toString().replaceAll(", ", "\n"));
+ clearBackupDirectory();
+ DatabaseDataExtractor dbDataExtractor = new DatabaseDataExtractor(databaseService, schemas, backupDirectory);
+ Set<String> backupTables = dataToBackup;
+ for (String backupTable : backupTables) {
+ dbDataExtractor.addTableNameToExtract(backupTable);
+ }
+ doSubWork(dbDataExtractor, monitor, 0.90);
+ dbDataExtractor.waitForWorkerThreads();
+ }
+ }
- private Set<String> getTablesToBackup() {
- Set<String> backupTables = new TreeSet<String>();
- Set<String> userSchemas = userSpecifiedConfig.keySet();
- for (String key : userSchemas) {
- // Backup data only if data exists in the current database
- if (currentDatabaseConfig.containsKey(key)) {
- SchemaData schemaDataInDb = currentDatabaseConfig.get(key);
- Map<String, TableElement> currentDbTableMap = schemaDataInDb.getTableMap();
- Set<String> currentDbTableNames = currentDbTableMap.keySet();
+ private Set<String> getTablesToBackup() {
+ Set<String> backupTables = new TreeSet<String>();
+ Set<String> userSchemas = userSpecifiedConfig.keySet();
+ for (String key : userSchemas) {
+ // Backup data only if data exists in the current database
+ if (currentDatabaseConfig.containsKey(key)) {
+ SchemaData schemaDataInDb = currentDatabaseConfig.get(key);
+ Map<String, TableElement> currentDbTableMap = schemaDataInDb.getTableMap();
+ Set<String> currentDbTableNames = currentDbTableMap.keySet();
- SchemaData schemaData = userSpecifiedConfig.get(key);
- Set<String> tableNamesToBackup = schemaData.getTablesToBackup();
- for (String tableName : tableNamesToBackup) {
- // Check that table we want to backup exists in the database
- // before we add it to the list
- if (currentDbTableNames.contains(tableName)) {
- backupTables.add(tableName);
- } else {
- System.out.println("Table doesn't exist in Db. Unable to backup [" + tableName + "]");
- }
- }
- } else {
- System.out.println("Schema doesn't exist in Db. Unable to backup tables from schema [" + key + "]");
- }
- }
- return backupTables;
- }
+ SchemaData schemaData = userSpecifiedConfig.get(key);
+ Set<String> tableNamesToBackup = schemaData.getTablesToBackup();
+ for (String tableName : tableNamesToBackup) {
+ // Check that table we want to backup exists in the database
+ // before we add it to the list
+ if (currentDbTableNames.contains(tableName)) {
+ backupTables.add(tableName);
+ } else {
+ System.out.println("Table doesn't exist in Db. Unable to backup [" + tableName + "]");
+ }
+ }
+ } else {
+ System.out.println("Schema doesn't exist in Db. Unable to backup tables from schema [" + key + "]");
+ }
+ }
+ return backupTables;
+ }
- private void clearBackupDirectory() {
- if (backupDirectory != null && backupDirectory.exists() && backupDirectory.canWrite()) {
- File[] fileList = backupDirectory.listFiles();
- for (File fileToDelete : fileList) {
- fileToDelete.delete();
- }
- backupDirectory.delete();
- backupDirectory.mkdirs();
- }
- }
+ private void clearBackupDirectory() {
+ if (backupDirectory != null && backupDirectory.exists() && backupDirectory.canWrite()) {
+ File[] fileList = backupDirectory.listFiles();
+ for (File fileToDelete : fileList) {
+ fileToDelete.delete();
+ }
+ backupDirectory.delete();
+ backupDirectory.mkdirs();
+ }
+ }
}
\ No newline at end of file
diff --git a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/CreateSchemaOperation.java b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/CreateSchemaOperation.java
index c05006d8e7f..c0987f3063b 100644
--- a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/CreateSchemaOperation.java
+++ b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/CreateSchemaOperation.java
@@ -29,30 +29,30 @@ import org.eclipse.osee.framework.database.core.SupportedDatabase;
* @author Roberto E. Escobar
*/
public class CreateSchemaOperation extends AbstractDbTxOperation {
- private final Map<String, SchemaData> userSchema;
- private final Map<String, SchemaData> dbSchema;
+ private final Map<String, SchemaData> userSchema;
+ private final Map<String, SchemaData> dbSchema;
- public CreateSchemaOperation(IOseeDatabaseServiceProvider provider, Map<String, SchemaData> userSchema, Map<String, SchemaData> dbSchema) {
- super(provider, "Create Schema", Activator.PLUGIN_ID);
- this.userSchema = userSchema;
- this.dbSchema = dbSchema;
- }
+ public CreateSchemaOperation(IOseeDatabaseServiceProvider provider, Map<String, SchemaData> userSchema, Map<String, SchemaData> dbSchema) {
+ super(provider, "Create Schema", Activator.PLUGIN_ID);
+ this.userSchema = userSchema;
+ this.dbSchema = dbSchema;
+ }
- @Override
- protected void doTxWork(IProgressMonitor monitor, OseeConnection connection) throws OseeCoreException {
- DatabaseMetaData metaData = connection.getMetaData();
- SqlManager sqlManager = SqlFactory.getSqlManager(metaData);
- SchemaSqlUtil dbInit = new SchemaSqlUtil(sqlManager);
+ @Override
+ protected void doTxWork(IProgressMonitor monitor, OseeConnection connection) throws OseeCoreException {
+ DatabaseMetaData metaData = connection.getMetaData();
+ SqlManager sqlManager = SqlFactory.getSqlManager(metaData);
+ SchemaSqlUtil dbInit = new SchemaSqlUtil(sqlManager);
- Set<String> schemas = userSchema.keySet();
- dbInit.dropIndices(schemas, userSchema, dbSchema);
- dbInit.dropTables(schemas, userSchema, dbSchema);
- if (SupportedDatabase.isDatabaseType(metaData, SupportedDatabase.postgresql)) {
- dbInit.dropSchema(schemas);
- dbInit.createSchema(schemas);
- }
- dbInit.addTables(schemas, userSchema);
- dbInit.addIndices(schemas, userSchema);
- }
+ Set<String> schemas = userSchema.keySet();
+ dbInit.dropIndices(schemas, userSchema, dbSchema);
+ dbInit.dropTables(schemas, userSchema, dbSchema);
+ if (SupportedDatabase.isDatabaseType(metaData, SupportedDatabase.postgresql)) {
+ dbInit.dropSchema(schemas);
+ dbInit.createSchema(schemas);
+ }
+ dbInit.addTables(schemas, userSchema);
+ dbInit.addIndices(schemas, userSchema);
+ }
}
diff --git a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/DatabaseDataExtractor.java b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/DatabaseDataExtractor.java
index 106fcd70b13..d37c37a71cd 100644
--- a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/DatabaseDataExtractor.java
+++ b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/DatabaseDataExtractor.java
@@ -40,7 +40,7 @@ import org.eclipse.osee.framework.core.datastore.schema.data.TableElement.TableD
import org.eclipse.osee.framework.core.datastore.schema.data.TableElement.TableTags;
import org.eclipse.osee.framework.core.exception.OseeDataStoreException;
import org.eclipse.osee.framework.core.operation.AbstractOperation;
-import org.eclipse.osee.framework.database.IOseeDatabaseServiceProvider;
+import org.eclipse.osee.framework.database.IOseeDatabaseService;
import org.eclipse.osee.framework.database.core.IOseeStatement;
import org.eclipse.osee.framework.database.core.SQL3DataType;
import org.eclipse.osee.framework.database.core.SupportedDatabase;
@@ -52,201 +52,202 @@ import org.eclipse.osee.framework.logging.OseeLog;
*/
public class DatabaseDataExtractor extends AbstractOperation {
- private static final String SQL_WILD_QUERY = "SELECT * FROM ";
- private final Set<String> schemas;
- private final File directory;
- private final List<Thread> workerThreads;
- private final Set<String> extractTables;
-
- private class ColumnInfo {
- String name;
- SQL3DataType type;
- }
-
- private final IOseeDatabaseServiceProvider provider;
-
- public DatabaseDataExtractor(IOseeDatabaseServiceProvider provider, Set<String> schemas, File directory) throws OseeDataStoreException {
- super("Extract Database Data", Activator.PLUGIN_ID);
- this.provider = provider;
- this.schemas = schemas;
- this.directory = directory;
- this.workerThreads = new ArrayList<Thread>();
- this.extractTables = new TreeSet<String>();
- }
-
- public void addTableNameToExtract(String fullyQualifiedTableName) {
- this.extractTables.add(fullyQualifiedTableName);
- }
-
- public void clearFilter() {
- this.extractTables.clear();
- }
-
- @Override
- protected void doWork(IProgressMonitor monitor) throws Exception {
- FileUtility.setupDirectoryForWrite(directory);
-
- Map<String, SchemaData> schemaDataMap = new HashMap<String, SchemaData>();
- ExtractDatabaseSchemaOperation operation = new ExtractDatabaseSchemaOperation(provider, schemas, schemaDataMap);
- doSubWork(operation, monitor, 0.20);
-
- Set<String> schemaKeys = schemaDataMap.keySet();
- for (String schema : schemaKeys) {
- SchemaData schemaData = schemaDataMap.get(schema);
-
- List<TableElement> tables = schemaData.getTablesOrderedByDependency();
- for (TableElement table : tables) {
-
- boolean extract = true;
- // only extract items in filter since filter was set with data
- if (this.extractTables != null && this.extractTables.size() > 0) {
- extract = extractTables.contains(table.getFullyQualifiedTableName());
- }
-
- if (extract) {
- DataExtractorThread workerThread = new DataExtractorThread(table);
- workerThreads.add(workerThread);
- workerThread.start();
- }
- }
- }
- }
-
- private class DataExtractorThread extends Thread {
- private final TableElement table;
-
- public DataExtractorThread(TableElement table) {
- this.table = table;
- setName(table.getName() + " Extractor");
- }
-
- @Override
- public void run() {
- IOseeStatement chStmt = null;
- OutputStream outputStream = null;
- try {
- chStmt = provider.getOseeDatabaseService().getStatement();
- String fileName = table.getFullyQualifiedTableName() + FileUtility.DB_DATA_EXTENSION;
- outputStream = new BufferedOutputStream(new FileOutputStream(new File(directory, fileName)));
-
- try {
- chStmt.runPreparedQuery(SQL_WILD_QUERY + table.getFullyQualifiedTableName());
- } catch (OseeDataStoreException ex) {
- chStmt.runPreparedQuery(SQL_WILD_QUERY + table.getName());
- }
-
- buildXml(chStmt, table, outputStream);
- } catch (Exception ex) {
- OseeLog.log(Activator.class, Level.SEVERE,
- "Error Processing Table [ " + table.getSchema() + "." + table.getName() + " ] Data ", ex);
- } finally {
- Lib.close(chStmt);
- Lib.close(outputStream);
- }
- }
- }
-
- public void waitForWorkerThreads() {
- for (Thread worker : workerThreads) {
- try {
- worker.join();
- } catch (InterruptedException ex) {
- OseeLog.log(Activator.class, Level.SEVERE, "Thread [" + worker.getName() + "] was Interrupted. ", ex);
- }
- }
- }
-
- private void buildXml(IOseeStatement chStmt, TableElement table, OutputStream outputStream) throws Exception {
- ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();
- int numberOfColumns = chStmt.getColumnCount();
- for (int index = 1; index <= numberOfColumns; index++) {
- ColumnInfo columnInfo = new ColumnInfo();
- columnInfo.name = chStmt.getColumnName(index);
- columnInfo.name = columnInfo.name.toUpperCase();
-
- int dataType = chStmt.getColumnType(index);
- if (chStmt.isDatabaseType(SupportedDatabase.foxpro)) {
- if (dataType == Types.CHAR) {
- dataType = Types.VARCHAR;
- }
- }
- columnInfo.type = SQL3DataType.get(dataType);
- columns.add(columnInfo);
- }
-
- XMLOutputFactory factory = XMLOutputFactory.newInstance();
- XMLStreamWriter writer = factory.createXMLStreamWriter(outputStream);
- writer.writeStartDocument("UTF-8", "1.0");
- writer.writeStartElement(TableTags.Table.name());
- writer.writeAttribute(TableDescriptionFields.schema.name(), table.getSchema());
- writer.writeAttribute(TableDescriptionFields.name.name(), table.getName());
-
- for (ColumnInfo info : columns) {
- writer.writeStartElement(TableTags.ColumnInfo.name());
- writer.writeAttribute(ColumnFields.id.name(), info.name);
- writer.writeAttribute(ColumnFields.type.name(), info.type.name());
- writer.writeEndElement();
- }
-
- while (chStmt.next()) {
- writer.writeStartElement(TableTags.Row.name());
- for (ColumnInfo column : columns) {
- String columnValue;
- switch (column.type) {
- case BIGINT:
- BigDecimal bigD = chStmt.getBigDecimal(column.name);
- columnValue = bigD != null ? bigD.toString() : "";
- break;
- case DATE:
- Date date = chStmt.getDate(column.name);
- columnValue = date != null ? date.toString() : "";
- break;
- case TIME:
- Time time = chStmt.getTime(column.name);
- columnValue = time != null ? time.toString() : "";
- break;
- case TIMESTAMP:
- Timestamp timestamp = chStmt.getTimestamp(column.name);
- columnValue = timestamp != null ? timestamp.toString() : "";
- break;
- default:
- columnValue = chStmt.getString(column.name);
- columnValue = handleSpecialCharacters(columnValue);
- break;
- }
- writer.writeAttribute(column.name, (columnValue != null ? columnValue : ""));
- }
- writer.writeEndElement();
- }
- writer.writeEndElement();
- writer.writeEndDocument();
- writer.flush();
- }
-
- private String handleSpecialCharacters(String value) {
- // \0 An ASCII 0 (NUL) character.
- // '' A single quote (�'�) character.
- // \b A backspace character.
- // \n A newline (linefeed) character.
- // \r A carriage return character.
- // \t A tab character.
- // \Z ASCII 26 (Control-Z). See note following the table.
-
- if (value != null) {
-
- value = value.replaceAll("\0", "");
- value = value.replaceAll("'", "''");
- // value = value.replaceAll("\"", "\\\\\""); No need to do this.
- Pattern pattern =
- Pattern.compile("[^" + "a-zA-Z0-9" + "!@#$%\\^&*\\(\\)" + "+ _.-=" + "\'\"<>{}\\[\\]|:;,\n\r\t\b?/`~\\\\]+");
- Matcher matcher = pattern.matcher(value);
-
- while (matcher.find()) {
- // System.out.println("Matcher: [" + matcher.group() + "]");
- value = value.replace(matcher.group(), "");
- }
- }
- return value;
- }
+ private static final String SQL_WILD_QUERY = "SELECT * FROM ";
+ private final Set<String> schemas;
+ private final File directory;
+ private final List<Thread> workerThreads;
+ private final Set<String> extractTables;
+
+ private class ColumnInfo {
+ String name;
+ SQL3DataType type;
+ }
+
+ private final IOseeDatabaseService databaseService;
+
+ public DatabaseDataExtractor(IOseeDatabaseService databaseService, Set<String> schemas, File directory) throws OseeDataStoreException {
+ super("Extract Database Data", Activator.PLUGIN_ID);
+ this.databaseService = databaseService;
+ this.schemas = schemas;
+ this.directory = directory;
+ this.workerThreads = new ArrayList<Thread>();
+ this.extractTables = new TreeSet<String>();
+ }
+
+ public void addTableNameToExtract(String fullyQualifiedTableName) {
+ this.extractTables.add(fullyQualifiedTableName);
+ }
+
+ public void clearFilter() {
+ this.extractTables.clear();
+ }
+
+ @Override
+ protected void doWork(IProgressMonitor monitor) throws Exception {
+ FileUtility.setupDirectoryForWrite(directory);
+
+ Map<String, SchemaData> schemaDataMap = new HashMap<String, SchemaData>();
+ ExtractDatabaseSchemaOperation operation =
+ new ExtractDatabaseSchemaOperation(databaseService, schemas, schemaDataMap);
+ doSubWork(operation, monitor, 0.20);
+
+ Set<String> schemaKeys = schemaDataMap.keySet();
+ for (String schema : schemaKeys) {
+ SchemaData schemaData = schemaDataMap.get(schema);
+
+ List<TableElement> tables = schemaData.getTablesOrderedByDependency();
+ for (TableElement table : tables) {
+
+ boolean extract = true;
+ // only extract items in filter since filter was set with data
+ if (this.extractTables != null && this.extractTables.size() > 0) {
+ extract = extractTables.contains(table.getFullyQualifiedTableName());
+ }
+
+ if (extract) {
+ DataExtractorThread workerThread = new DataExtractorThread(table);
+ workerThreads.add(workerThread);
+ workerThread.start();
+ }
+ }
+ }
+ }
+
+ private class DataExtractorThread extends Thread {
+ private final TableElement table;
+
+ public DataExtractorThread(TableElement table) {
+ this.table = table;
+ setName(table.getName() + " Extractor");
+ }
+
+ @Override
+ public void run() {
+ IOseeStatement chStmt = null;
+ OutputStream outputStream = null;
+ try {
+ chStmt = databaseService.getStatement();
+ String fileName = table.getFullyQualifiedTableName() + FileUtility.DB_DATA_EXTENSION;
+ outputStream = new BufferedOutputStream(new FileOutputStream(new File(directory, fileName)));
+
+ try {
+ chStmt.runPreparedQuery(SQL_WILD_QUERY + table.getFullyQualifiedTableName());
+ } catch (OseeDataStoreException ex) {
+ chStmt.runPreparedQuery(SQL_WILD_QUERY + table.getName());
+ }
+
+ buildXml(chStmt, table, outputStream);
+ } catch (Exception ex) {
+ OseeLog.log(Activator.class, Level.SEVERE,
+ "Error Processing Table [ " + table.getSchema() + "." + table.getName() + " ] Data ", ex);
+ } finally {
+ Lib.close(chStmt);
+ Lib.close(outputStream);
+ }
+ }
+ }
+
+ public void waitForWorkerThreads() {
+ for (Thread worker : workerThreads) {
+ try {
+ worker.join();
+ } catch (InterruptedException ex) {
+ OseeLog.log(Activator.class, Level.SEVERE, "Thread [" + worker.getName() + "] was Interrupted. ", ex);
+ }
+ }
+ }
+
+ private void buildXml(IOseeStatement chStmt, TableElement table, OutputStream outputStream) throws Exception {
+ ArrayList<ColumnInfo> columns = new ArrayList<ColumnInfo>();
+ int numberOfColumns = chStmt.getColumnCount();
+ for (int index = 1; index <= numberOfColumns; index++) {
+ ColumnInfo columnInfo = new ColumnInfo();
+ columnInfo.name = chStmt.getColumnName(index);
+ columnInfo.name = columnInfo.name.toUpperCase();
+
+ int dataType = chStmt.getColumnType(index);
+ if (chStmt.isDatabaseType(SupportedDatabase.foxpro)) {
+ if (dataType == Types.CHAR) {
+ dataType = Types.VARCHAR;
+ }
+ }
+ columnInfo.type = SQL3DataType.get(dataType);
+ columns.add(columnInfo);
+ }
+
+ XMLOutputFactory factory = XMLOutputFactory.newInstance();
+ XMLStreamWriter writer = factory.createXMLStreamWriter(outputStream);
+ writer.writeStartDocument("UTF-8", "1.0");
+ writer.writeStartElement(TableTags.Table.name());
+ writer.writeAttribute(TableDescriptionFields.schema.name(), table.getSchema());
+ writer.writeAttribute(TableDescriptionFields.name.name(), table.getName());
+
+ for (ColumnInfo info : columns) {
+ writer.writeStartElement(TableTags.ColumnInfo.name());
+ writer.writeAttribute(ColumnFields.id.name(), info.name);
+ writer.writeAttribute(ColumnFields.type.name(), info.type.name());
+ writer.writeEndElement();
+ }
+
+ while (chStmt.next()) {
+ writer.writeStartElement(TableTags.Row.name());
+ for (ColumnInfo column : columns) {
+ String columnValue;
+ switch (column.type) {
+ case BIGINT:
+ BigDecimal bigD = chStmt.getBigDecimal(column.name);
+ columnValue = bigD != null ? bigD.toString() : "";
+ break;
+ case DATE:
+ Date date = chStmt.getDate(column.name);
+ columnValue = date != null ? date.toString() : "";
+ break;
+ case TIME:
+ Time time = chStmt.getTime(column.name);
+ columnValue = time != null ? time.toString() : "";
+ break;
+ case TIMESTAMP:
+ Timestamp timestamp = chStmt.getTimestamp(column.name);
+ columnValue = timestamp != null ? timestamp.toString() : "";
+ break;
+ default:
+ columnValue = chStmt.getString(column.name);
+ columnValue = handleSpecialCharacters(columnValue);
+ break;
+ }
+ writer.writeAttribute(column.name, (columnValue != null ? columnValue : ""));
+ }
+ writer.writeEndElement();
+ }
+ writer.writeEndElement();
+ writer.writeEndDocument();
+ writer.flush();
+ }
+
+ private String handleSpecialCharacters(String value) {
+ // \0 An ASCII 0 (NUL) character.
+ // '' A single quote (') character.
+ // \b A backspace character.
+ // \n A newline (linefeed) character.
+ // \r A carriage return character.
+ // \t A tab character.
+ // \Z ASCII 26 (Control-Z). See note following the table.
+
+ if (value != null) {
+
+ value = value.replaceAll("\0", "");
+ value = value.replaceAll("'", "''");
+ // value = value.replaceAll("\"", "\\\\\""); No need to do this.
+ Pattern pattern =
+ Pattern.compile("[^" + "a-zA-Z0-9" + "!@#$%\\^&*\\(\\)" + "+ _.-=" + "\'\"<>{}\\[\\]|:;,\n\r\t\b?/`~\\\\]+");
+ Matcher matcher = pattern.matcher(value);
+
+ while (matcher.find()) {
+ // System.out.println("Matcher: [" + matcher.group() + "]");
+ value = value.replace(matcher.group(), "");
+ }
+ }
+ return value;
+ }
}
\ No newline at end of file
diff --git a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/ExtractDatabaseSchemaOperation.java b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/ExtractDatabaseSchemaOperation.java
index f18f8e2b8cc..f6c34764cf8 100644
--- a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/ExtractDatabaseSchemaOperation.java
+++ b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/ExtractDatabaseSchemaOperation.java
@@ -25,6 +25,7 @@ import java.util.regex.Pattern;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.osee.framework.core.datastore.internal.Activator;
import org.eclipse.osee.framework.core.datastore.schema.data.AppliesToClause;
+import org.eclipse.osee.framework.core.datastore.schema.data.AppliesToClause.OrderType;
import org.eclipse.osee.framework.core.datastore.schema.data.ColumnMetadata;
import org.eclipse.osee.framework.core.datastore.schema.data.ConstraintElement;
import org.eclipse.osee.framework.core.datastore.schema.data.ConstraintFactory;
@@ -32,16 +33,14 @@ import org.eclipse.osee.framework.core.datastore.schema.data.ConstraintTypes;
import org.eclipse.osee.framework.core.datastore.schema.data.ForeignKey;
import org.eclipse.osee.framework.core.datastore.schema.data.IndexElement;
import org.eclipse.osee.framework.core.datastore.schema.data.ReferenceClause;
-import org.eclipse.osee.framework.core.datastore.schema.data.SchemaData;
-import org.eclipse.osee.framework.core.datastore.schema.data.TableElement;
-import org.eclipse.osee.framework.core.datastore.schema.data.AppliesToClause.OrderType;
import org.eclipse.osee.framework.core.datastore.schema.data.ReferenceClause.OnDeleteEnum;
import org.eclipse.osee.framework.core.datastore.schema.data.ReferenceClause.OnUpdateEnum;
+import org.eclipse.osee.framework.core.datastore.schema.data.SchemaData;
+import org.eclipse.osee.framework.core.datastore.schema.data.TableElement;
import org.eclipse.osee.framework.core.datastore.schema.data.TableElement.ColumnFields;
import org.eclipse.osee.framework.core.datastore.schema.data.TableElement.TableDescriptionFields;
import org.eclipse.osee.framework.core.operation.AbstractOperation;
import org.eclipse.osee.framework.database.IOseeDatabaseService;
-import org.eclipse.osee.framework.database.IOseeDatabaseServiceProvider;
import org.eclipse.osee.framework.database.core.OseeConnection;
import org.eclipse.osee.framework.database.core.SQL3DataType;
import org.eclipse.osee.framework.database.core.SupportedDatabase;
@@ -50,438 +49,437 @@ import org.eclipse.osee.framework.database.core.SupportedDatabase;
* @author Roberto E. Escobar
*/
public class ExtractDatabaseSchemaOperation extends AbstractOperation {
- private static final String DEFAULT_FILTER = "BIN.*";
-
- private DatabaseMetaData dbData;
- private String dbName;
- private String dbVersion;
- private final Map<String, SchemaData> database;
- private final List<String> filter;
- private final Set<String> tablesToExtract;
- private final Set<String> schemas;
- private final IOseeDatabaseServiceProvider provider;
-
- public ExtractDatabaseSchemaOperation(IOseeDatabaseServiceProvider provider, Set<String> schemas, Map<String, SchemaData> schemaData) {
- super("Extract Database Schema", Activator.PLUGIN_ID);
- this.provider = provider;
- this.schemas = schemas;
- this.database = schemaData;
- this.filter = new ArrayList<String>();
- filter.add(DEFAULT_FILTER);
- this.tablesToExtract = new TreeSet<String>();
- }
-
- @Override
- protected void doWork(IProgressMonitor monitor) throws Exception {
- IOseeDatabaseService service = provider.getOseeDatabaseService();
- OseeConnection connection = service.getConnection();
- try {
- this.dbData = connection.getMetaData();
-
- this.dbName = dbData.getDatabaseProductName();
- this.dbVersion = dbData.getDatabaseProductVersion();
-
- for (String schema : schemas) {
- SchemaData dbTables = getTableInformation(schema);
- database.put(schema, dbTables);
- }
- } finally {
- connection.close();
- }
- }
-
- public void addToFilter(String value) {
- filter.add(value);
- }
-
- // private Set<String> getAllSchemas() throws SQLException {
- // ResultSet schemaResults = dbData.getSchemas();
- // Set<String> schemaSet = new TreeSet<String>();
- //
- // while (schemaResults.next()) {
- // String schema = schemaResults.getString("TABLE_SCHEM");
- // if (schema != null && !schema.equals("")) {
- // schemaSet.add(schema);
- // }
- // }
- // schemaResults.close();
- // return schemaSet;
- // }
-
- // /**
- // * Writes the XML files in the directory specified.
- // *
- // * @param directory The directory tow write the XML files.
- // * @throws IOException
- // */
- // public void writeToFile(File directory) throws IOException {
- // FileUtility.setupDirectoryForWrite(directory);
- // Set<String> keys = database.keySet();
- // for (String schema : keys) {
- // SchemaData tableData = database.get(schema);
- // File xmlFile = new File(directory.getAbsolutePath() + File.separator + schema + FileUtility.SCHEMA_EXTENSION);
- // try {
- // Jaxp.writeXmlDocument(tableData.getXmlDocument(), xmlFile);
- // } catch (Exception ex) {
- // OseeLog.log(Activator.class, Level.SEVERE, ex);
- // }
- // }
- // }
-
- @Override
- public String toString() {
- StringBuilder buffer = new StringBuilder();
- Set<String> keys = database.keySet();
- for (String schema : keys) {
- SchemaData tableData = database.get(schema);
- buffer.append(" Schema: \n");
- buffer.append(schema);
- buffer.append("\n");
- buffer.append(tableData.toString());
- }
- return String.format("Name: [%s]\tVer: [%s]\n%s", dbName, dbVersion, buffer);
- }
-
- private boolean isFiltered(String value) {
- for (String filterExpression : filter) {
- Pattern searchPattern = Pattern.compile(filterExpression, Pattern.DOTALL);
- Matcher matcher = searchPattern.matcher(value);
- if (matcher.find()) {
- return true;
- }
- }
- return false;
- }
-
- public void addTableToExtract(String fullyqualifiedTableName) {
- this.tablesToExtract.add(fullyqualifiedTableName);
- }
-
- public void clearTableFilter() {
- tablesToExtract.clear();
- }
-
- private SchemaData getTableInformation(String schemaPattern) throws Exception {
- SchemaData dbTables = new SchemaData();
- ResultSet tables = null;
- tables = dbData.getTables(null, null, null, new String[] {"TABLE"});
-
- while (tables.next()) {
- String tableName = tables.getString("TABLE_NAME").toUpperCase();
- String schemaName = tables.getString("TABLE_SCHEM");
- if (tableName != null && !isFiltered(tableName) && schemaName.equalsIgnoreCase(schemaPattern)) {
- boolean extract = true;
- if (this.tablesToExtract != null && this.tablesToExtract.size() > 0) {
- extract = tablesToExtract.contains(schemaPattern + "." + tableName);
- }
-
- if (extract) {
- TableElement tableEntry = new TableElement();
- tableEntry.addTableDescription(TableDescriptionFields.name, tableName);
- tableEntry.addTableDescription(TableDescriptionFields.schema, schemaName);
- getColumnInformation(tableEntry);
- getColumnPrimaryKey(tableEntry);
-
- if (!(SupportedDatabase.isDatabaseType(dbData, SupportedDatabase.foxpro) || SupportedDatabase.isDatabaseType(
- dbData, SupportedDatabase.postgresql))) {
- getColumnForeignKey(tableEntry);
- }
- getIndexInfo(tableEntry);
- dbTables.addTableDefinition(tableEntry);
- }
- }
- }
- tables.close();
- return dbTables;
- }
-
- private void getColumnInformation(TableElement aTable) throws Exception {
- ResultSet columns = null;
- try {
- columns = dbData.getColumns(null, aTable.getSchema(), aTable.getName(), null);
- } catch (SQLException ex) {
- columns = dbData.getColumns(null, null, aTable.getName(), null);
- }
- while (columns.next()) {
- String id = columns.getString("COLUMN_NAME");
- id = id.toUpperCase();
- ColumnMetadata column = new ColumnMetadata(id);
-
- int dataType = columns.getInt("DATA_TYPE");
- if (SupportedDatabase.isDatabaseType(dbData, SupportedDatabase.foxpro)) {
- if (dataType == Types.CHAR) {
- dataType = Types.VARCHAR;
- }
- }
- String dataTypeName = SQL3DataType.get(dataType).name();
- column.addColumnField(ColumnFields.type, dataTypeName);
-
- String defaultValue = "";
- int defaultType = columns.getInt("NULLABLE");
- switch (defaultType) {
- case java.sql.DatabaseMetaData.columnNoNulls:
- defaultValue = "not null";
- break;
- case java.sql.DatabaseMetaData.columnNullable:
- // Dont specify if Null - Let DB Decide.
- defaultValue = "";
- break;
- case java.sql.DatabaseMetaData.columnNullableUnknown:
- default:
- // Since unknown then don't specify
- defaultValue = "";
- break;
- }
- if (!defaultValue.equals("")) {
- column.addColumnField(ColumnFields.defaultValue, defaultValue);
- }
-
- if (!SupportedDatabase.isDatabaseType(dbData, SupportedDatabase.foxpro)) {
- // int dataType = columns.getInt("DATA_TYPE");
- switch (dataType) {
- case java.sql.Types.CHAR:
- case java.sql.Types.VARCHAR:
- String limits = columns.getString("COLUMN_SIZE");
- if (limits != null && !limits.equals("")) {
- column.addColumnField(ColumnFields.limits, limits);
- }
- break;
- case java.sql.Types.DECIMAL:
- case java.sql.Types.NUMERIC:
- limits = columns.getString("COLUMN_SIZE");
- String decimal = columns.getString("DECIMAL_DIGITS");
- if (decimal != null && !decimal.equals("")) {
- if (limits != null && !limits.equals("")) {
- limits += "," + decimal;
- }
- }
- if (limits != null && !limits.equals("")) {
- column.addColumnField(ColumnFields.limits, limits);
- }
- default:
- break;
- }
- } else {
- switch (dataType) {
- case java.sql.Types.CHAR:
- case java.sql.Types.VARCHAR:
- String limits = "255";
- if (limits != null && !limits.equals("")) {
- column.addColumnField(ColumnFields.limits, limits);
- }
- break;
- default:
- break;
- }
- }
- aTable.addColumn(column);
- }
- columns.close();
- }
-
- private void getColumnPrimaryKey(TableElement aTable) throws SQLException {
- ResultSet primaryKeys = null;
- try {
- primaryKeys = dbData.getPrimaryKeys(null, aTable.getSchema(), aTable.getName());
- } catch (SQLException ex) {
- primaryKeys = dbData.getPrimaryKeys(null, null, aTable.getName());
- }
- Map<String, Set<String>> constraintKeyMap = new HashMap<String, Set<String>>();
-
- while (primaryKeys.next()) {
- String column = primaryKeys.getString("COLUMN_NAME");
- String keyId = primaryKeys.getString("PK_NAME");
-
- if (keyId == null || keyId.equals("")) {
- keyId = column + "_PK";
- }
-
- if (!constraintKeyMap.containsKey(keyId)) {
- Set<String> set = new TreeSet<String>();
- set.add(column);
- constraintKeyMap.put(keyId, set);
- } else {
- Set<String> set = constraintKeyMap.get(keyId);
- if (!set.contains(column)) {
- set.add(column);
- }
- }
- }
-
- Set<String> keys = constraintKeyMap.keySet();
- for (String pk : keys) {
- ConstraintElement constraint =
- ConstraintFactory.getConstraint(ConstraintTypes.PRIMARY_KEY, aTable.getSchema(), pk, false);
- Set<String> columnSet = constraintKeyMap.get(pk);
- for (String column : columnSet) {
- constraint.addColumn(column);
- }
- aTable.addConstraint(constraint);
- }
- primaryKeys.close();
- }
-
- private void getColumnForeignKey(TableElement aTable) throws SQLException {
- ResultSet importedKeys = dbData.getImportedKeys(null, aTable.getSchema(), aTable.getName());
-
- while (importedKeys.next()) {
-
- String appliesToColumnId = importedKeys.getString("FKCOLUMN_NAME");
- String fkeyId = importedKeys.getString("FK_NAME");
- String fKeyAddress = importedKeys.getString("FKTABLE_SCHEM");
-
- String refersToTable = importedKeys.getString("PKTABLE_NAME");
- String refersToTableAddress = importedKeys.getString("PKTABLE_SCHEM");
- String referencesColumn = importedKeys.getString("PKCOLUMN_NAME");
-
- OnDeleteEnum onDeleteAction = OnDeleteEnum.UNSPECIFIED;
- String onDeleteRule = importedKeys.getString("DELETE_RULE");
- if (onDeleteRule != null && !onDeleteRule.equals("")) {
- // System.out.println("onDelete: " + onDeleteRule);
- int type = Integer.parseInt(onDeleteRule);
- switch (type) {
- case java.sql.DatabaseMetaData.importedKeyNoAction:
- onDeleteAction = OnDeleteEnum.NO_ACTION;
- break;
- case java.sql.DatabaseMetaData.importedKeyRestrict:
- onDeleteAction = OnDeleteEnum.RESTRICT;
- break;
- case java.sql.DatabaseMetaData.importedKeyCascade:
- onDeleteAction = OnDeleteEnum.CASCADE;
- break;
- case java.sql.DatabaseMetaData.importedKeySetNull:
- onDeleteAction = OnDeleteEnum.SET_NULL;
- break;
- case java.sql.DatabaseMetaData.importedKeySetDefault:
- default:
- onDeleteAction = OnDeleteEnum.UNSPECIFIED;
- break;
- }
- }
-
- OnUpdateEnum onUpdateAction = OnUpdateEnum.UNSPECIFIED;
- String onUpdateRule = importedKeys.getString("UPDATE_RULE");
- if (onUpdateRule != null && !onUpdateRule.equals("")) {
- // System.out.println("onUpdate: " + onUpdateRule);
- int type = Integer.parseInt(onUpdateRule);
- switch (type) {
- case java.sql.DatabaseMetaData.importedKeyNoAction:
- onUpdateAction = OnUpdateEnum.NO_ACTION;
- break;
- case java.sql.DatabaseMetaData.importedKeyRestrict:
- onUpdateAction = OnUpdateEnum.RESTRICT;
- break;
- case java.sql.DatabaseMetaData.importedKeyCascade:
- case java.sql.DatabaseMetaData.importedKeySetNull:
- case java.sql.DatabaseMetaData.importedKeySetDefault:
- default:
- onUpdateAction = OnUpdateEnum.UNSPECIFIED;
- break;
- }
- }
-
- boolean deferrable = false;
- String deferrabilityId = importedKeys.getString("DEFERRABILITY");
- if (deferrabilityId != null && !deferrabilityId.equals("")) {
- int type = Integer.parseInt(deferrabilityId);
- switch (type) {
- case java.sql.DatabaseMetaData.importedKeyInitiallyDeferred:
- case java.sql.DatabaseMetaData.importedKeyInitiallyImmediate:
- deferrable = true;
- break;
- case java.sql.DatabaseMetaData.importedKeyNotDeferrable:
- deferrable = false;
- break;
- default:
- deferrable = false;
- break;
- }
- }
-
- if (fKeyAddress == null || fKeyAddress.equals("")) {
- fKeyAddress = aTable.getSchema();
- }
-
- if (fkeyId == null || fkeyId.equals("")) {
- fkeyId = appliesToColumnId + "_FK";
- }
-
- if (refersToTableAddress == null || refersToTableAddress.equals("")) {
- refersToTableAddress = aTable.getSchema();
- }
-
- ConstraintElement constraint =
- ConstraintFactory.getConstraint(ConstraintTypes.FOREIGN_KEY, fKeyAddress, fkeyId, deferrable);
- constraint.addColumn(appliesToColumnId);
-
- ReferenceClause ref = new ReferenceClause(refersToTableAddress, refersToTable);
- ref.addColumn(referencesColumn);
-
- ref.setOnDeleteAction(onDeleteAction);
- ref.setOnUpdateAction(onUpdateAction);
-
- ((ForeignKey) constraint).addReference(ref);
-
- aTable.addConstraint(constraint);
- }
- importedKeys.close();
- }
-
- private void getIndexInfo(TableElement aTable) throws SQLException {
- ResultSet indexKeys = dbData.getIndexInfo(null, aTable.getSchema(), aTable.getName(), false, false);
- Pattern pattern = Pattern.compile("SQL\\d+");
-
- Map<String, Map<Integer, AppliesToClause>> indexMap = new HashMap<String, Map<Integer, AppliesToClause>>();
-
- while (indexKeys.next()) {
- String indexName = indexKeys.getString("INDEX_NAME");
-
- if (indexName != null && indexName.length() > 0) {
- Matcher matcher = pattern.matcher(indexName);
- if (!matcher.matches()) {
- if (indexKeys.getShort("TYPE") == DatabaseMetaData.tableIndexOther) {
-
- short ordinal = indexKeys.getShort("ORDINAL_POSITION");
- String columnName = indexKeys.getString("COLUMN_NAME");
-
- String orderTypeString = indexKeys.getString("ASC_OR_DESC");
- OrderType orderType = OrderType.Undefined;
- if (orderTypeString != null) {
- if (orderTypeString.equalsIgnoreCase("A")) {
- orderType = OrderType.Ascending;
- } else if (orderTypeString.equalsIgnoreCase("D")) {
- orderType = OrderType.Descending;
- }
- }
-
- Map<Integer, AppliesToClause> appliesTo = null;
- if (indexMap.containsKey(indexName)) {
- appliesTo = indexMap.get(indexName);
- } else {
- appliesTo = new HashMap<Integer, AppliesToClause>();
- indexMap.put(indexName, appliesTo);
- }
- appliesTo.put(new Integer(ordinal), new AppliesToClause(columnName, orderType));
- }
- }
- }
- }
- for (String indexName : indexMap.keySet()) {
- Map<Integer, AppliesToClause> clauseMap = indexMap.get(indexName);
- IndexElement element = new IndexElement(indexName);
-
- Set<Integer> index = clauseMap.keySet();
- Set<Integer> sortedIndex = new TreeSet<Integer>();
- for (Integer val : index) {
- sortedIndex.add(val);
- }
-
- for (Integer val : sortedIndex) {
- AppliesToClause clause = clauseMap.get(val);
- element.addAppliesTo(clause.getColumnName(), clause.getOrderType());
- }
- aTable.addIndexData(element);
- }
- indexKeys.close();
- }
+ private static final String DEFAULT_FILTER = "BIN.*";
+
+ private DatabaseMetaData dbData;
+ private String dbName;
+ private String dbVersion;
+ private final Map<String, SchemaData> database;
+ private final List<String> filter;
+ private final Set<String> tablesToExtract;
+ private final Set<String> schemas;
+ private final IOseeDatabaseService dbService;
+
+ public ExtractDatabaseSchemaOperation(IOseeDatabaseService dbService, Set<String> schemas, Map<String, SchemaData> schemaData) {
+ super("Extract Database Schema", Activator.PLUGIN_ID);
+ this.dbService = dbService;
+ this.schemas = schemas;
+ this.database = schemaData;
+ this.filter = new ArrayList<String>();
+ filter.add(DEFAULT_FILTER);
+ this.tablesToExtract = new TreeSet<String>();
+ }
+
+ @Override
+ protected void doWork(IProgressMonitor monitor) throws Exception {
+ OseeConnection connection = dbService.getConnection();
+ try {
+ this.dbData = connection.getMetaData();
+
+ this.dbName = dbData.getDatabaseProductName();
+ this.dbVersion = dbData.getDatabaseProductVersion();
+
+ for (String schema : schemas) {
+ SchemaData dbTables = getTableInformation(schema);
+ database.put(schema, dbTables);
+ }
+ } finally {
+ connection.close();
+ }
+ }
+
+ public void addToFilter(String value) {
+ filter.add(value);
+ }
+
+ // private Set<String> getAllSchemas() throws SQLException {
+ // ResultSet schemaResults = dbData.getSchemas();
+ // Set<String> schemaSet = new TreeSet<String>();
+ //
+ // while (schemaResults.next()) {
+ // String schema = schemaResults.getString("TABLE_SCHEM");
+ // if (schema != null && !schema.equals("")) {
+ // schemaSet.add(schema);
+ // }
+ // }
+ // schemaResults.close();
+ // return schemaSet;
+ // }
+
+ // /**
+ // * Writes the XML files in the directory specified.
+ // *
+ // * @param directory The directory tow write the XML files.
+ // * @throws IOException
+ // */
+ // public void writeToFile(File directory) throws IOException {
+ // FileUtility.setupDirectoryForWrite(directory);
+ // Set<String> keys = database.keySet();
+ // for (String schema : keys) {
+ // SchemaData tableData = database.get(schema);
+ // File xmlFile = new File(directory.getAbsolutePath() + File.separator + schema + FileUtility.SCHEMA_EXTENSION);
+ // try {
+ // Jaxp.writeXmlDocument(tableData.getXmlDocument(), xmlFile);
+ // } catch (Exception ex) {
+ // OseeLog.log(Activator.class, Level.SEVERE, ex);
+ // }
+ // }
+ // }
+
+ @Override
+ public String toString() {
+ StringBuilder buffer = new StringBuilder();
+ Set<String> keys = database.keySet();
+ for (String schema : keys) {
+ SchemaData tableData = database.get(schema);
+ buffer.append(" Schema: \n");
+ buffer.append(schema);
+ buffer.append("\n");
+ buffer.append(tableData.toString());
+ }
+ return String.format("Name: [%s]\tVer: [%s]\n%s", dbName, dbVersion, buffer);
+ }
+
+ private boolean isFiltered(String value) {
+ for (String filterExpression : filter) {
+ Pattern searchPattern = Pattern.compile(filterExpression, Pattern.DOTALL);
+ Matcher matcher = searchPattern.matcher(value);
+ if (matcher.find()) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public void addTableToExtract(String fullyqualifiedTableName) {
+ this.tablesToExtract.add(fullyqualifiedTableName);
+ }
+
+ public void clearTableFilter() {
+ tablesToExtract.clear();
+ }
+
+ private SchemaData getTableInformation(String schemaPattern) throws Exception {
+ SchemaData dbTables = new SchemaData();
+ ResultSet tables = null;
+ tables = dbData.getTables(null, null, null, new String[] {"TABLE"});
+
+ while (tables.next()) {
+ String tableName = tables.getString("TABLE_NAME").toUpperCase();
+ String schemaName = tables.getString("TABLE_SCHEM");
+ if (tableName != null && !isFiltered(tableName) && schemaName.equalsIgnoreCase(schemaPattern)) {
+ boolean extract = true;
+ if (this.tablesToExtract != null && this.tablesToExtract.size() > 0) {
+ extract = tablesToExtract.contains(schemaPattern + "." + tableName);
+ }
+
+ if (extract) {
+ TableElement tableEntry = new TableElement();
+ tableEntry.addTableDescription(TableDescriptionFields.name, tableName);
+ tableEntry.addTableDescription(TableDescriptionFields.schema, schemaName);
+ getColumnInformation(tableEntry);
+ getColumnPrimaryKey(tableEntry);
+
+ if (!(SupportedDatabase.isDatabaseType(dbData, SupportedDatabase.foxpro) || SupportedDatabase.isDatabaseType(
+ dbData, SupportedDatabase.postgresql))) {
+ getColumnForeignKey(tableEntry);
+ }
+ getIndexInfo(tableEntry);
+ dbTables.addTableDefinition(tableEntry);
+ }
+ }
+ }
+ tables.close();
+ return dbTables;
+ }
+
+ private void getColumnInformation(TableElement aTable) throws Exception {
+ ResultSet columns = null;
+ try {
+ columns = dbData.getColumns(null, aTable.getSchema(), aTable.getName(), null);
+ } catch (SQLException ex) {
+ columns = dbData.getColumns(null, null, aTable.getName(), null);
+ }
+ while (columns.next()) {
+ String id = columns.getString("COLUMN_NAME");
+ id = id.toUpperCase();
+ ColumnMetadata column = new ColumnMetadata(id);
+
+ int dataType = columns.getInt("DATA_TYPE");
+ if (SupportedDatabase.isDatabaseType(dbData, SupportedDatabase.foxpro)) {
+ if (dataType == Types.CHAR) {
+ dataType = Types.VARCHAR;
+ }
+ }
+ String dataTypeName = SQL3DataType.get(dataType).name();
+ column.addColumnField(ColumnFields.type, dataTypeName);
+
+ String defaultValue = "";
+ int defaultType = columns.getInt("NULLABLE");
+ switch (defaultType) {
+ case java.sql.DatabaseMetaData.columnNoNulls:
+ defaultValue = "not null";
+ break;
+ case java.sql.DatabaseMetaData.columnNullable:
+ // Dont specify if Null - Let DB Decide.
+ defaultValue = "";
+ break;
+ case java.sql.DatabaseMetaData.columnNullableUnknown:
+ default:
+ // Since unknown then don't specify
+ defaultValue = "";
+ break;
+ }
+ if (!defaultValue.equals("")) {
+ column.addColumnField(ColumnFields.defaultValue, defaultValue);
+ }
+
+ if (!SupportedDatabase.isDatabaseType(dbData, SupportedDatabase.foxpro)) {
+ // int dataType = columns.getInt("DATA_TYPE");
+ switch (dataType) {
+ case java.sql.Types.CHAR:
+ case java.sql.Types.VARCHAR:
+ String limits = columns.getString("COLUMN_SIZE");
+ if (limits != null && !limits.equals("")) {
+ column.addColumnField(ColumnFields.limits, limits);
+ }
+ break;
+ case java.sql.Types.DECIMAL:
+ case java.sql.Types.NUMERIC:
+ limits = columns.getString("COLUMN_SIZE");
+ String decimal = columns.getString("DECIMAL_DIGITS");
+ if (decimal != null && !decimal.equals("")) {
+ if (limits != null && !limits.equals("")) {
+ limits += "," + decimal;
+ }
+ }
+ if (limits != null && !limits.equals("")) {
+ column.addColumnField(ColumnFields.limits, limits);
+ }
+ default:
+ break;
+ }
+ } else {
+ switch (dataType) {
+ case java.sql.Types.CHAR:
+ case java.sql.Types.VARCHAR:
+ String limits = "255";
+ if (limits != null && !limits.equals("")) {
+ column.addColumnField(ColumnFields.limits, limits);
+ }
+ break;
+ default:
+ break;
+ }
+ }
+ aTable.addColumn(column);
+ }
+ columns.close();
+ }
+
+ private void getColumnPrimaryKey(TableElement aTable) throws SQLException {
+ ResultSet primaryKeys = null;
+ try {
+ primaryKeys = dbData.getPrimaryKeys(null, aTable.getSchema(), aTable.getName());
+ } catch (SQLException ex) {
+ primaryKeys = dbData.getPrimaryKeys(null, null, aTable.getName());
+ }
+ Map<String, Set<String>> constraintKeyMap = new HashMap<String, Set<String>>();
+
+ while (primaryKeys.next()) {
+ String column = primaryKeys.getString("COLUMN_NAME");
+ String keyId = primaryKeys.getString("PK_NAME");
+
+ if (keyId == null || keyId.equals("")) {
+ keyId = column + "_PK";
+ }
+
+ if (!constraintKeyMap.containsKey(keyId)) {
+ Set<String> set = new TreeSet<String>();
+ set.add(column);
+ constraintKeyMap.put(keyId, set);
+ } else {
+ Set<String> set = constraintKeyMap.get(keyId);
+ if (!set.contains(column)) {
+ set.add(column);
+ }
+ }
+ }
+
+ Set<String> keys = constraintKeyMap.keySet();
+ for (String pk : keys) {
+ ConstraintElement constraint =
+ ConstraintFactory.getConstraint(ConstraintTypes.PRIMARY_KEY, aTable.getSchema(), pk, false);
+ Set<String> columnSet = constraintKeyMap.get(pk);
+ for (String column : columnSet) {
+ constraint.addColumn(column);
+ }
+ aTable.addConstraint(constraint);
+ }
+ primaryKeys.close();
+ }
+
+ private void getColumnForeignKey(TableElement aTable) throws SQLException {
+ ResultSet importedKeys = dbData.getImportedKeys(null, aTable.getSchema(), aTable.getName());
+
+ while (importedKeys.next()) {
+
+ String appliesToColumnId = importedKeys.getString("FKCOLUMN_NAME");
+ String fkeyId = importedKeys.getString("FK_NAME");
+ String fKeyAddress = importedKeys.getString("FKTABLE_SCHEM");
+
+ String refersToTable = importedKeys.getString("PKTABLE_NAME");
+ String refersToTableAddress = importedKeys.getString("PKTABLE_SCHEM");
+ String referencesColumn = importedKeys.getString("PKCOLUMN_NAME");
+
+ OnDeleteEnum onDeleteAction = OnDeleteEnum.UNSPECIFIED;
+ String onDeleteRule = importedKeys.getString("DELETE_RULE");
+ if (onDeleteRule != null && !onDeleteRule.equals("")) {
+ // System.out.println("onDelete: " + onDeleteRule);
+ int type = Integer.parseInt(onDeleteRule);
+ switch (type) {
+ case java.sql.DatabaseMetaData.importedKeyNoAction:
+ onDeleteAction = OnDeleteEnum.NO_ACTION;
+ break;
+ case java.sql.DatabaseMetaData.importedKeyRestrict:
+ onDeleteAction = OnDeleteEnum.RESTRICT;
+ break;
+ case java.sql.DatabaseMetaData.importedKeyCascade:
+ onDeleteAction = OnDeleteEnum.CASCADE;
+ break;
+ case java.sql.DatabaseMetaData.importedKeySetNull:
+ onDeleteAction = OnDeleteEnum.SET_NULL;
+ break;
+ case java.sql.DatabaseMetaData.importedKeySetDefault:
+ default:
+ onDeleteAction = OnDeleteEnum.UNSPECIFIED;
+ break;
+ }
+ }
+
+ OnUpdateEnum onUpdateAction = OnUpdateEnum.UNSPECIFIED;
+ String onUpdateRule = importedKeys.getString("UPDATE_RULE");
+ if (onUpdateRule != null && !onUpdateRule.equals("")) {
+ // System.out.println("onUpdate: " + onUpdateRule);
+ int type = Integer.parseInt(onUpdateRule);
+ switch (type) {
+ case java.sql.DatabaseMetaData.importedKeyNoAction:
+ onUpdateAction = OnUpdateEnum.NO_ACTION;
+ break;
+ case java.sql.DatabaseMetaData.importedKeyRestrict:
+ onUpdateAction = OnUpdateEnum.RESTRICT;
+ break;
+ case java.sql.DatabaseMetaData.importedKeyCascade:
+ case java.sql.DatabaseMetaData.importedKeySetNull:
+ case java.sql.DatabaseMetaData.importedKeySetDefault:
+ default:
+ onUpdateAction = OnUpdateEnum.UNSPECIFIED;
+ break;
+ }
+ }
+
+ boolean deferrable = false;
+ String deferrabilityId = importedKeys.getString("DEFERRABILITY");
+ if (deferrabilityId != null && !deferrabilityId.equals("")) {
+ int type = Integer.parseInt(deferrabilityId);
+ switch (type) {
+ case java.sql.DatabaseMetaData.importedKeyInitiallyDeferred:
+ case java.sql.DatabaseMetaData.importedKeyInitiallyImmediate:
+ deferrable = true;
+ break;
+ case java.sql.DatabaseMetaData.importedKeyNotDeferrable:
+ deferrable = false;
+ break;
+ default:
+ deferrable = false;
+ break;
+ }
+ }
+
+ if (fKeyAddress == null || fKeyAddress.equals("")) {
+ fKeyAddress = aTable.getSchema();
+ }
+
+ if (fkeyId == null || fkeyId.equals("")) {
+ fkeyId = appliesToColumnId + "_FK";
+ }
+
+ if (refersToTableAddress == null || refersToTableAddress.equals("")) {
+ refersToTableAddress = aTable.getSchema();
+ }
+
+ ConstraintElement constraint =
+ ConstraintFactory.getConstraint(ConstraintTypes.FOREIGN_KEY, fKeyAddress, fkeyId, deferrable);
+ constraint.addColumn(appliesToColumnId);
+
+ ReferenceClause ref = new ReferenceClause(refersToTableAddress, refersToTable);
+ ref.addColumn(referencesColumn);
+
+ ref.setOnDeleteAction(onDeleteAction);
+ ref.setOnUpdateAction(onUpdateAction);
+
+ ((ForeignKey) constraint).addReference(ref);
+
+ aTable.addConstraint(constraint);
+ }
+ importedKeys.close();
+ }
+
+ private void getIndexInfo(TableElement aTable) throws SQLException {
+ ResultSet indexKeys = dbData.getIndexInfo(null, aTable.getSchema(), aTable.getName(), false, false);
+ Pattern pattern = Pattern.compile("SQL\\d+");
+
+ Map<String, Map<Integer, AppliesToClause>> indexMap = new HashMap<String, Map<Integer, AppliesToClause>>();
+
+ while (indexKeys.next()) {
+ String indexName = indexKeys.getString("INDEX_NAME");
+
+ if (indexName != null && indexName.length() > 0) {
+ Matcher matcher = pattern.matcher(indexName);
+ if (!matcher.matches()) {
+ if (indexKeys.getShort("TYPE") == DatabaseMetaData.tableIndexOther) {
+
+ short ordinal = indexKeys.getShort("ORDINAL_POSITION");
+ String columnName = indexKeys.getString("COLUMN_NAME");
+
+ String orderTypeString = indexKeys.getString("ASC_OR_DESC");
+ OrderType orderType = OrderType.Undefined;
+ if (orderTypeString != null) {
+ if (orderTypeString.equalsIgnoreCase("A")) {
+ orderType = OrderType.Ascending;
+ } else if (orderTypeString.equalsIgnoreCase("D")) {
+ orderType = OrderType.Descending;
+ }
+ }
+
+ Map<Integer, AppliesToClause> appliesTo = null;
+ if (indexMap.containsKey(indexName)) {
+ appliesTo = indexMap.get(indexName);
+ } else {
+ appliesTo = new HashMap<Integer, AppliesToClause>();
+ indexMap.put(indexName, appliesTo);
+ }
+ appliesTo.put(new Integer(ordinal), new AppliesToClause(columnName, orderType));
+ }
+ }
+ }
+ }
+ for (String indexName : indexMap.keySet()) {
+ Map<Integer, AppliesToClause> clauseMap = indexMap.get(indexName);
+ IndexElement element = new IndexElement(indexName);
+
+ Set<Integer> index = clauseMap.keySet();
+ Set<Integer> sortedIndex = new TreeSet<Integer>();
+ for (Integer val : index) {
+ sortedIndex.add(val);
+ }
+
+ for (Integer val : sortedIndex) {
+ AppliesToClause clause = clauseMap.get(val);
+ element.addAppliesTo(clause.getColumnName(), clause.getOrderType());
+ }
+ aTable.addIndexData(element);
+ }
+ indexKeys.close();
+ }
}
\ No newline at end of file
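The tail of this file builds the extracted schema model entirely from JDBC DatabaseMetaData: getColumns for column types, sizes, and nullability, getPrimaryKeys and getImportedKeys for constraints, and getIndexInfo for indices. Below is a minimal, self-contained sketch of that metadata-walking pattern; it assumes a plain JDBC connection (the jdbc:h2:mem:demo URL is a placeholder requiring the H2 driver on the classpath) and simply prints results instead of populating OSEE's TableElement/ConstraintElement model.

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;

public class SchemaMetadataWalker {

   public static void main(String[] args) throws SQLException {
      // Placeholder URL; any JDBC source exposes the same metadata API.
      try (Connection connection = DriverManager.getConnection("jdbc:h2:mem:demo")) {
         DatabaseMetaData metaData = connection.getMetaData();
         try (ResultSet tables = metaData.getTables(null, null, "%", new String[] {"TABLE"})) {
            while (tables.next()) {
               describe(metaData, tables.getString("TABLE_SCHEM"), tables.getString("TABLE_NAME"));
            }
         }
      }
   }

   private static void describe(DatabaseMetaData metaData, String schema, String table) throws SQLException {
      // Columns: JDBC type code, size, and nullability, as read in getColumnInformation.
      try (ResultSet columns = metaData.getColumns(null, schema, table, null)) {
         while (columns.next()) {
            System.out.printf("%s.%s.%s type=%d size=%d nullable=%d%n", schema, table,
               columns.getString("COLUMN_NAME"), columns.getInt("DATA_TYPE"),
               columns.getInt("COLUMN_SIZE"), columns.getInt("NULLABLE"));
         }
      }
      // Primary key columns grouped under PK_NAME, as in getColumnPrimaryKey.
      try (ResultSet primaryKeys = metaData.getPrimaryKeys(null, schema, table)) {
         while (primaryKeys.next()) {
            System.out.printf("  PK %s -> %s%n", primaryKeys.getString("PK_NAME"),
               primaryKeys.getString("COLUMN_NAME"));
         }
      }
      // Foreign key references, as in getColumnForeignKey.
      try (ResultSet importedKeys = metaData.getImportedKeys(null, schema, table)) {
         while (importedKeys.next()) {
            System.out.printf("  FK %s -> %s.%s%n", importedKeys.getString("FKCOLUMN_NAME"),
               importedKeys.getString("PKTABLE_NAME"), importedKeys.getString("PKCOLUMN_NAME"));
         }
      }
   }
}

The ResultSet column names (COLUMN_NAME, DATA_TYPE, PK_NAME, FKCOLUMN_NAME, and so on) are the same ones the operation above reads; only the destination of the data differs.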
diff --git a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/ImportDataFromDbServiceOperation.java b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/ImportDataFromDbServiceOperation.java
index 04fedc3ef3f..a7517e42709 100644
--- a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/ImportDataFromDbServiceOperation.java
+++ b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/ImportDataFromDbServiceOperation.java
@@ -22,7 +22,7 @@ import org.eclipse.osee.framework.core.datastore.internal.Activator;
import org.eclipse.osee.framework.core.datastore.schema.data.SchemaData;
import org.eclipse.osee.framework.core.datastore.schema.data.TableElement;
import org.eclipse.osee.framework.core.operation.AbstractOperation;
-import org.eclipse.osee.framework.database.IOseeDatabaseServiceProvider;
+import org.eclipse.osee.framework.database.IOseeDatabaseService;
import org.eclipse.osee.framework.database.core.DatabaseInfoManager;
import org.eclipse.osee.framework.jdk.core.util.Strings;
@@ -31,157 +31,159 @@ import org.eclipse.osee.framework.jdk.core.util.Strings;
*/
public class ImportDataFromDbServiceOperation extends AbstractOperation {
- private static final File backupDirectory = new File("BackupDirectory");
-
- private final Map<String, SchemaData> userSpecifiedConfig;
- private final String tableImportSource;
- private final IOseeDatabaseServiceProvider provider;
-
- public ImportDataFromDbServiceOperation(IOseeDatabaseServiceProvider provider, Map<String, SchemaData> userSpecifiedConfig, String tableImportSource) {
- super("Import Data from Db Service", Activator.PLUGIN_ID);
- this.provider = provider;
- this.userSpecifiedConfig = userSpecifiedConfig;
- this.tableImportSource = tableImportSource;
- }
-
- @Override
- protected void doWork(IProgressMonitor monitor) throws Exception {
- Set<String> importConnections = getImportConnections();
- for (String importFromDbService : importConnections) {
- System.out.println("Import Table Data from Db: " + importFromDbService);
-
- IDatabaseInfo dbInfo = DatabaseInfoManager.getDataStoreById(importFromDbService);
- System.out.println("Gathering information from ..." + importFromDbService);
-
- String userName = dbInfo.getDatabaseLoginName();
- if (userName != null && !userName.equals("")) {
-
- Set<String> schemasToGet = new TreeSet<String>();
- schemasToGet.add(userName.toUpperCase());
-
- Map<String, Set<String>> dataToImport = getTablesToImport(monitor, userName.toUpperCase(), schemasToGet);
- if (dataToImport.size() > 0) {
- System.out.println(dataToImport.toString().replaceAll(", ", "\n"));
- makeBackupDirectoryIfItDoesntExist();
-
- System.out.println("Backing up Files to: " + backupDirectory.getAbsolutePath());
- DatabaseDataExtractor dbDataExtractor = new DatabaseDataExtractor(provider, schemasToGet, backupDirectory);
-
- Set<String> tablesToImport;
- if (importFromDbService.equals(determineDefaultConnection())) {
- tablesToImport = dataToImport.get(tableImportSource);
- } else {
- tablesToImport = dataToImport.get(importFromDbService);
- }
-
- for (String importTable : tablesToImport) {
- dbDataExtractor.addTableNameToExtract(importTable);
- }
- doSubWork(dbDataExtractor, monitor, 0.10);
- dbDataExtractor.waitForWorkerThreads();
-
- prepareFilesForImport();
- }
- }
- }
- }
-
- private void prepareFilesForImport() {
- Set<String> keys = userSpecifiedConfig.keySet();
- if (keys.size() == 1) {
- String userName = "";
- for (String temp : keys) {
- userName = temp;
- }
- List<File> files = FileUtility.getDBDataFileList(backupDirectory);
- for (File fileName : files) {
- String filename = fileName.getAbsolutePath().toString();
- filename = filename.substring(filename.lastIndexOf(File.separator) + 1, filename.length());
- filename = filename.substring(filename.indexOf(".") + 1, filename.length());
- fileName.renameTo(new File(backupDirectory + File.separator + userName + "." + filename));
- }
- }
- }
-
- private String determineDefaultConnection() {
- String importFromDbService = System.getProperty(tableImportSource);
- if (!Strings.isValid(importFromDbService)) {
- importFromDbService = "oracle";
- }
- return importFromDbService;
- }
-
- private Set<String> getImportConnections() {
- String defaultConnection = determineDefaultConnection();
- Set<String> userSchemas = userSpecifiedConfig.keySet();
- Set<String> connectionsNeeded = new TreeSet<String>();
- for (String key : userSchemas) {
- SchemaData schemaDataInUserConfig = userSpecifiedConfig.get(key);
- Map<String, Set<String>> tableNamesToImport = schemaDataInUserConfig.getTablesToImport(tableImportSource);
- Set<String> keys = tableNamesToImport.keySet();
- for (String connectionString : keys) {
- if (connectionString.equals(tableImportSource)) {
- connectionsNeeded.add(defaultConnection);
- } else {
- connectionsNeeded.add(connectionString);
- }
- }
- }
- return connectionsNeeded;
- }
-
- public boolean canRun() {
- return true;
- }
-
- private Map<String, SchemaData> getAvailableSchemasFromImportDb(IProgressMonitor monitor, Set<String> schemas) throws Exception {
- Map<String, SchemaData> schemaMap = new HashMap<String, SchemaData>();
- ExtractDatabaseSchemaOperation schemaExtractor = new ExtractDatabaseSchemaOperation(provider, schemas, schemaMap);
- doSubWork(schemaExtractor, monitor, 0.20);
- return schemaMap;
- }
-
- private Map<String, Set<String>> getTablesToImport(IProgressMonitor monitor, String userName, Set<String> schemasToGet) throws Exception {
- Map<String, SchemaData> currentDbSchemas = getAvailableSchemasFromImportDb(monitor, schemasToGet);
- Set<String> userSchemas = userSpecifiedConfig.keySet();
-
- SchemaData schemaData = currentDbSchemas.get(userName);
- Map<String, TableElement> tableMap = schemaData.getTableMap();
-
- Map<String, Set<String>> importTables = new HashMap<String, Set<String>>();
- for (String key : userSchemas) {
- SchemaData schemaDataInUserConfig = userSpecifiedConfig.get(key);
- Map<String, Set<String>> tableNamesToImport = schemaDataInUserConfig.getTablesToImport(tableImportSource);
-
- Set<String> keys = tableNamesToImport.keySet();
- for (String importKey : keys) {
- Set<String> namesToImport = tableNamesToImport.get(importKey);
-
- for (String tableName : namesToImport) {
- tableName = tableName.replaceAll(key + "\\.", userName + ".");
-
- if (tableMap.containsKey(tableName)) {
- Set<String> tableSet;
- if (importTables.containsKey(importKey)) {
- tableSet = importTables.get(importKey);
- } else {
- tableSet = new TreeSet<String>();
- }
- tableSet.add(tableName);
- importTables.put(importKey, tableSet);
- }
- }
- }
- }
- return importTables;
- }
-
- private void makeBackupDirectoryIfItDoesntExist() {
- if (backupDirectory != null && backupDirectory.exists() && backupDirectory.canWrite()) {
- return;
- } else {
- backupDirectory.mkdirs();
- }
- }
+ private static final File backupDirectory = new File("BackupDirectory");
+
+ private final Map<String, SchemaData> userSpecifiedConfig;
+ private final String tableImportSource;
+ private final IOseeDatabaseService databaseService;
+
+ public ImportDataFromDbServiceOperation(IOseeDatabaseService databaseService, Map<String, SchemaData> userSpecifiedConfig, String tableImportSource) {
+ super("Import Data from Db Service", Activator.PLUGIN_ID);
+ this.databaseService = databaseService;
+ this.userSpecifiedConfig = userSpecifiedConfig;
+ this.tableImportSource = tableImportSource;
+ }
+
+ @Override
+ protected void doWork(IProgressMonitor monitor) throws Exception {
+ Set<String> importConnections = getImportConnections();
+ for (String importFromDbService : importConnections) {
+ System.out.println("Import Table Data from Db: " + importFromDbService);
+
+ IDatabaseInfo dbInfo = DatabaseInfoManager.getDataStoreById(importFromDbService);
+ System.out.println("Gathering information from ..." + importFromDbService);
+
+ String userName = dbInfo.getDatabaseLoginName();
+ if (userName != null && !userName.equals("")) {
+
+ Set<String> schemasToGet = new TreeSet<String>();
+ schemasToGet.add(userName.toUpperCase());
+
+ Map<String, Set<String>> dataToImport = getTablesToImport(monitor, userName.toUpperCase(), schemasToGet);
+ if (dataToImport.size() > 0) {
+ System.out.println(dataToImport.toString().replaceAll(", ", "\n"));
+ makeBackupDirectoryIfItDoesntExist();
+
+ System.out.println("Backing up Files to: " + backupDirectory.getAbsolutePath());
+ DatabaseDataExtractor dbDataExtractor =
+ new DatabaseDataExtractor(databaseService, schemasToGet, backupDirectory);
+
+ Set<String> tablesToImport;
+ if (importFromDbService.equals(determineDefaultConnection())) {
+ tablesToImport = dataToImport.get(tableImportSource);
+ } else {
+ tablesToImport = dataToImport.get(importFromDbService);
+ }
+
+ for (String importTable : tablesToImport) {
+ dbDataExtractor.addTableNameToExtract(importTable);
+ }
+ doSubWork(dbDataExtractor, monitor, 0.10);
+ dbDataExtractor.waitForWorkerThreads();
+
+ prepareFilesForImport();
+ }
+ }
+ }
+ }
+
+ private void prepareFilesForImport() {
+ Set<String> keys = userSpecifiedConfig.keySet();
+ if (keys.size() == 1) {
+ String userName = "";
+ for (String temp : keys) {
+ userName = temp;
+ }
+ List<File> files = FileUtility.getDBDataFileList(backupDirectory);
+ for (File fileName : files) {
+ String filename = fileName.getAbsolutePath().toString();
+ filename = filename.substring(filename.lastIndexOf(File.separator) + 1, filename.length());
+ filename = filename.substring(filename.indexOf(".") + 1, filename.length());
+ fileName.renameTo(new File(backupDirectory + File.separator + userName + "." + filename));
+ }
+ }
+ }
+
+ private String determineDefaultConnection() {
+ String importFromDbService = System.getProperty(tableImportSource);
+ if (!Strings.isValid(importFromDbService)) {
+ importFromDbService = "oracle";
+ }
+ return importFromDbService;
+ }
+
+ private Set<String> getImportConnections() {
+ String defaultConnection = determineDefaultConnection();
+ Set<String> userSchemas = userSpecifiedConfig.keySet();
+ Set<String> connectionsNeeded = new TreeSet<String>();
+ for (String key : userSchemas) {
+ SchemaData schemaDataInUserConfig = userSpecifiedConfig.get(key);
+ Map<String, Set<String>> tableNamesToImport = schemaDataInUserConfig.getTablesToImport(tableImportSource);
+ Set<String> keys = tableNamesToImport.keySet();
+ for (String connectionString : keys) {
+ if (connectionString.equals(tableImportSource)) {
+ connectionsNeeded.add(defaultConnection);
+ } else {
+ connectionsNeeded.add(connectionString);
+ }
+ }
+ }
+ return connectionsNeeded;
+ }
+
+ public boolean canRun() {
+ return true;
+ }
+
+ private Map<String, SchemaData> getAvailableSchemasFromImportDb(IProgressMonitor monitor, Set<String> schemas) throws Exception {
+ Map<String, SchemaData> schemaMap = new HashMap<String, SchemaData>();
+ ExtractDatabaseSchemaOperation schemaExtractor =
+ new ExtractDatabaseSchemaOperation(databaseService, schemas, schemaMap);
+ doSubWork(schemaExtractor, monitor, 0.20);
+ return schemaMap;
+ }
+
+ private Map<String, Set<String>> getTablesToImport(IProgressMonitor monitor, String userName, Set<String> schemasToGet) throws Exception {
+ Map<String, SchemaData> currentDbSchemas = getAvailableSchemasFromImportDb(monitor, schemasToGet);
+ Set<String> userSchemas = userSpecifiedConfig.keySet();
+
+ SchemaData schemaData = currentDbSchemas.get(userName);
+ Map<String, TableElement> tableMap = schemaData.getTableMap();
+
+ Map<String, Set<String>> importTables = new HashMap<String, Set<String>>();
+ for (String key : userSchemas) {
+ SchemaData schemaDataInUserConfig = userSpecifiedConfig.get(key);
+ Map<String, Set<String>> tableNamesToImport = schemaDataInUserConfig.getTablesToImport(tableImportSource);
+
+ Set<String> keys = tableNamesToImport.keySet();
+ for (String importKey : keys) {
+ Set<String> namesToImport = tableNamesToImport.get(importKey);
+
+ for (String tableName : namesToImport) {
+ tableName = tableName.replaceAll(key + "\\.", userName + ".");
+
+ if (tableMap.containsKey(tableName)) {
+ Set<String> tableSet;
+ if (importTables.containsKey(importKey)) {
+ tableSet = importTables.get(importKey);
+ } else {
+ tableSet = new TreeSet<String>();
+ }
+ tableSet.add(tableName);
+ importTables.put(importKey, tableSet);
+ }
+ }
+ }
+ }
+ return importTables;
+ }
+
+ private void makeBackupDirectoryIfItDoesntExist() {
+ if (backupDirectory != null && backupDirectory.exists() && backupDirectory.canWrite()) {
+ return;
+ } else {
+ backupDirectory.mkdirs();
+ }
+ }
}
\ No newline at end of file
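This file's change is representative of the whole commit: the operation previously held an IOseeDatabaseServiceProvider and resolved the service on demand, and it now receives the IOseeDatabaseService directly through its constructor, which fits the dependency-tracker registration named in the commit message. The sketch below contrasts the two shapes using hypothetical stand-in interfaces (none of these names are the real OSEE types).

// Hypothetical stand-ins, for illustration only.
interface DatabaseService {
   int runQuery(String sql);
}

// Before: the operation held a provider and looked the service up lazily.
interface DatabaseServiceProvider {
   DatabaseService getService();
}

class OperationUsingProvider {
   private final DatabaseServiceProvider provider;

   OperationUsingProvider(DatabaseServiceProvider provider) {
      this.provider = provider;
   }

   void doWork() {
      provider.getService().runQuery("select 1");
   }
}

// After: the resolved service is injected directly, so the dependency is
// explicit at construction time and easy to supply from a service tracker.
class OperationUsingService {
   private final DatabaseService databaseService;

   OperationUsingService(DatabaseService databaseService) {
      this.databaseService = databaseService;
   }

   void doWork() {
      databaseService.runQuery("select 1");
   }
}

Direct injection also makes the operation trivial to unit test, since any DatabaseService implementation can be passed in without standing up a provider.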
diff --git a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/OperationFactory.java b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/OperationFactory.java
index b1425572e19..52ee99ebb5f 100644
--- a/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/OperationFactory.java
+++ b/plugins/org.eclipse.osee.framework.core.datastore/src/org/eclipse/osee/framework/core/datastore/schema/operations/OperationFactory.java
@@ -15,6 +15,7 @@ import org.eclipse.osee.framework.core.datastore.internal.Activator;
import org.eclipse.osee.framework.core.datastore.schema.data.SchemaData;
import org.eclipse.osee.framework.core.operation.CompositeOperation;
import org.eclipse.osee.framework.core.operation.IOperation;
+import org.eclipse.osee.framework.database.IOseeDatabaseService;
import org.eclipse.osee.framework.database.IOseeDatabaseServiceProvider;
/**
@@ -22,18 +23,24 @@ import org.eclipse.osee.framework.database.IOseeDatabaseServiceProvider;
*/
public final class OperationFactory {
- private OperationFactory() {
- }
+ private OperationFactory() {
+ }
- public static IOperation createDbSchema(IOseeDatabaseServiceProvider provider, IOseeSchemaProvider schemaProvider, SchemaCreationOptions options) {
- Map<String, SchemaData> userSpecifiedConfig = new HashMap<String, SchemaData>();
- Map<String, SchemaData> currentDatabaseConfig = new HashMap<String, SchemaData>();
+ public static IOperation createDbSchema(final IOseeDatabaseService databaseService, IOseeSchemaProvider schemaProvider, SchemaCreationOptions options) {
+ Map<String, SchemaData> userSpecifiedConfig = new HashMap<String, SchemaData>();
+ Map<String, SchemaData> currentDatabaseConfig = new HashMap<String, SchemaData>();
- Collection<IOperation> ops = new ArrayList<IOperation>();
- ops.add(new LoadUserSchemasOperation(userSpecifiedConfig, schemaProvider, options));
- ops.add(new ExtractDatabaseSchemaOperation(provider, userSpecifiedConfig.keySet(), currentDatabaseConfig));
- ops.add(new CreateSchemaOperation(provider, userSpecifiedConfig, currentDatabaseConfig));
+ Collection<IOperation> ops = new ArrayList<IOperation>();
+ ops.add(new LoadUserSchemasOperation(userSpecifiedConfig, schemaProvider, options));
+ ops.add(new ExtractDatabaseSchemaOperation(databaseService, userSpecifiedConfig.keySet(), currentDatabaseConfig));
+ ops.add(new CreateSchemaOperation(new IOseeDatabaseServiceProvider() {
- return new CompositeOperation("Create OSEE Schema", Activator.PLUGIN_ID, ops);
- }
+ @Override
+ public IOseeDatabaseService getOseeDatabaseService() {
+ return databaseService;
+ }
+ }, userSpecifiedConfig, currentDatabaseConfig));
+
+ return new CompositeOperation("Create OSEE Schema", Activator.PLUGIN_ID, ops);
+ }
}
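OperationFactory now accepts the service itself, but CreateSchemaOperation has not been converted yet, so the factory bridges the gap with an anonymous IOseeDatabaseServiceProvider that simply returns the service it was handed. A generic sketch of that adapter follows; the names are illustrative, and once the remaining operation takes the service directly the adapter can be deleted.

// Illustrative adapter: wrap an already-resolved service in the provider
// interface that an older constructor still expects.
interface LegacyServiceProvider<T> {
   T getService();
}

final class Adapters {
   private Adapters() {
   }

   static <T> LegacyServiceProvider<T> providerOf(final T service) {
      return new LegacyServiceProvider<T>() {
         @Override
         public T getService() {
            // Always hand back the captured instance; no lookup is performed.
            return service;
         }
      };
   }
}

Wrapping an already-resolved instance this way keeps the old constructor signature compiling while the remaining call sites migrate to the injected-service style.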
