author	Stefan Winkler	2010-01-16 12:37:56 +0000
committer	Stefan Winkler	2010-01-16 12:37:56 +0000
commit	e5fd7290cf9721322b416c985ad4e21e93688bca (patch)
tree	1cae9b349abfa9220326ac2699cd9291c174257c
parent	a903c5b4fda67c7ae3544cc4ba3c31fbec51a913 (diff)
download	cdo-e5fd7290cf9721322b416c985ad4e21e93688bca.tar.gz
cdo-e5fd7290cf9721322b416c985ad4e21e93688bca.tar.xz
cdo-e5fd7290cf9721322b416c985ad4e21e93688bca.zip
[296440] [DB] Change RDB schema to improve scalability of to-many references in audit mode
https://bugs.eclipse.org/bugs/show_bug.cgi?id=296440
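The essence of the schema change: a list/featuremap row no longer belongs to exactly one revision via a single cdo_version column, but carries a version range (the new cdo_version_added and cdo_version_removed columns added to CDODBSchema below). Appending to a list then inserts a single new row, and removing an entry only closes its range, instead of copying the whole list table for every new revision; reading a revision selects the rows whose range covers it, as assembled in initSqlStrings() of the new *WithRanges mappings. The following is only an illustrative sketch, not part of the commit; the table name "example_list_table" and the id column "cdo_source" are placeholders, and the usual java.sql/java.util imports are assumed:

  // Illustrative sketch only: read the list entries visible in a given revision.
  static List<Object> readVisibleEntries(Connection connection, long id, int version) throws SQLException
  {
    PreparedStatement pstmt = connection.prepareStatement("SELECT cdo_value FROM example_list_table" //
        + " WHERE cdo_source = ?" // owning revision's CDOID
        + " AND cdo_version_added <= ?" // entry was added in this version or earlier
        + " AND (cdo_version_removed IS NULL OR cdo_version_removed > ?)" // ...and not removed yet
        + " ORDER BY cdo_idx");
    pstmt.setLong(1, id);
    pstmt.setInt(2, version);
    pstmt.setInt(3, version);

    List<Object> result = new ArrayList<Object>();
    ResultSet resultSet = pstmt.executeQuery();
    while (resultSet.next())
    {
      result.add(resultSet.getObject(1));
    }

    resultSet.close();
    pstmt.close();
    return result;
  }

Rows belonging to older revisions are never deleted, so audit reads of historical versions keep working against the same table.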
-rw-r--r--  plugins/org.eclipse.emf.cdo.server.db/plugin.xml  4
-rw-r--r--  plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/CDODBSchema.java  8
-rw-r--r--  plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/SmartPreparedStatementCache.java  4
-rw-r--r--  plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractFeatureMapTableMapping.java  117
-rw-r--r--  plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractHorizontalClassMapping.java  2
-rw-r--r--  plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractListTableMapping.java  91
-rw-r--r--  plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditFeatureMapTableMappingWithRanges.java  1259
-rw-r--r--  plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditListTableMappingWithRanges.java  1061
-rw-r--r--  plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/BasicAbstractListTableMapping.java  52
-rw-r--r--  plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditClassMapping.java  134
-rw-r--r--  plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditMappingStrategyWithRanges.java  59
-rw-r--r--  plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2.java  37
12 files changed, 2670 insertions, 158 deletions
diff --git a/plugins/org.eclipse.emf.cdo.server.db/plugin.xml b/plugins/org.eclipse.emf.cdo.server.db/plugin.xml
index 076fa9346b..e19b78e5ad 100644
--- a/plugins/org.eclipse.emf.cdo.server.db/plugin.xml
+++ b/plugins/org.eclipse.emf.cdo.server.db/plugin.xml
@@ -33,5 +33,9 @@
class="org.eclipse.emf.cdo.server.internal.db.mapping.horizontal.HorizontalNonAuditMappingStrategy"
type="horizontalNonAudit">
</mappingStrategy>
+ <mappingStrategy
+ class="org.eclipse.emf.cdo.server.internal.db.mapping.horizontal.HorizontalAuditMappingStrategyWithRanges"
+ type="horizontalAuditWithRanges">
+ </mappingStrategy>
</extension>
</plugin>
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/CDODBSchema.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/CDODBSchema.java
index ff506d8311..a4f6a5c9db 100644
--- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/CDODBSchema.java
+++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/CDODBSchema.java
@@ -148,6 +148,10 @@ public class CDODBSchema extends DBSchema
public static final String LIST_REVISION_VERSION = "cdo_version"; //$NON-NLS-1$
+ public static final String LIST_REVISION_VERSION_ADDED = "cdo_version_added"; //$NON-NLS-1$
+
+ public static final String LIST_REVISION_VERSION_REMOVED = "cdo_version_removed"; //$NON-NLS-1$
+
public static final String LIST_IDX = "cdo_idx"; //$NON-NLS-1$
public static final String LIST_VALUE = "cdo_value"; //$NON-NLS-1$
@@ -159,6 +163,10 @@ public class CDODBSchema extends DBSchema
public static final String FEATUREMAP_VERSION = "cdo_version"; //$NON-NLS-1$
+ public static final String FEATUREMAP_VERSION_ADDED = "cdo_version_added"; //$NON-NLS-1$
+
+ public static final String FEATUREMAP_VERSION_REMOVED = "cdo_version_removed"; //$NON-NLS-1$
+
public static final String FEATUREMAP_IDX = "cdo_idx"; //$NON-NLS-1$
public static final String FEATUREMAP_TAG = "cdo_tag"; //$NON-NLS-1$
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/SmartPreparedStatementCache.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/SmartPreparedStatementCache.java
index e6fedad034..41d5c42d95 100644
--- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/SmartPreparedStatementCache.java
+++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/SmartPreparedStatementCache.java
@@ -49,6 +49,10 @@ public class SmartPreparedStatementCache extends AbstractPreparedStatementCache
return result;
}
+ /**
+ * @param ps
+ * the prepared statement to be released to the cache, or <code>null</code>.
+ */
public void releasePreparedStatement(PreparedStatement ps)
{
if (ps != null) // Bug 276926: Silently accept ps == null and do nothing.
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractFeatureMapTableMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractFeatureMapTableMapping.java
index 08cf34dcee..4ff583abf9 100644
--- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractFeatureMapTableMapping.java
+++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractFeatureMapTableMapping.java
@@ -20,8 +20,8 @@ import org.eclipse.emf.cdo.server.IStoreChunkReader.Chunk;
import org.eclipse.emf.cdo.server.db.CDODBUtil;
import org.eclipse.emf.cdo.server.db.IDBStoreAccessor;
import org.eclipse.emf.cdo.server.db.IDBStoreChunkReader;
+import org.eclipse.emf.cdo.server.db.IPreparedStatementCache;
import org.eclipse.emf.cdo.server.db.IPreparedStatementCache.ReuseProbability;
-import org.eclipse.emf.cdo.server.db.mapping.IListMapping;
import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy;
import org.eclipse.emf.cdo.server.db.mapping.ITypeMapping;
import org.eclipse.emf.cdo.server.internal.db.CDODBSchema;
@@ -57,20 +57,15 @@ import java.util.Map;
/**
* This abstract base class provides basic behavior needed for mapping many-valued attributes to tables.
- *
+ *
* @author Eike Stepper
* @since 3.0
*/
-public abstract class AbstractFeatureMapTableMapping implements IListMapping
+public abstract class AbstractFeatureMapTableMapping extends BasicAbstractListTableMapping
{
private static final ContextTracer TRACER = new ContextTracer(OM.DEBUG, AbstractFeatureMapTableMapping.class);
/**
- * The feature for this mapping.
- */
- private EStructuralFeature feature;
-
- /**
* The table of this mapping.
*/
private IDBTable table;
@@ -90,11 +85,6 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
*/
private Map<Long, ITypeMapping> typeMappings;
- /**
- * The associated mapping strategy.
- */
- private IMappingStrategy mappingStrategy;
-
// --------- SQL strings - see initSqlStrings() -----------------
private String sqlSelectChunksPrefix;
@@ -102,18 +92,13 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
protected String sqlInsert;
- private EClass containingClass;
-
private String sqlGetListLastIndex;
private List<DBType> dbTypes;
public AbstractFeatureMapTableMapping(IMappingStrategy mappingStrategy, EClass eClass, EStructuralFeature feature)
{
- this.mappingStrategy = mappingStrategy;
- this.feature = feature;
- containingClass = eClass;
-
+ super(mappingStrategy, eClass, feature);
initDBTypes();
initTable();
initSqlStrings();
@@ -122,14 +107,13 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
private void initDBTypes()
{
// TODO add annotation processing here ...
-
dbTypes = new ArrayList<DBType>(TypeMappingFactory.getDefaultFeatureMapDBTypes());
}
private void initTable()
{
- String tableName = mappingStrategy.getTableName(containingClass, feature);
- table = mappingStrategy.getStore().getDBSchema().addTable(tableName);
+ String tableName = getMappingStrategy().getTableName(getContainingClass(), getFeature());
+ table = getMappingStrategy().getStore().getDBSchema().addTable(tableName);
// add fields for keys (cdo_id, version, feature_id)
FieldInfo[] fields = getKeyFields();
@@ -274,21 +258,11 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
sqlInsert = builder.toString();
}
- public final EStructuralFeature getFeature()
- {
- return feature;
- }
-
protected List<DBType> getDBTypes()
{
return dbTypes;
}
- public final EClass getContainingClass()
- {
- return containingClass;
- }
-
protected final IDBTable getTable()
{
return table;
@@ -331,33 +305,26 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
if (TRACER.isEnabled())
{
- TRACER.format("Reading list values for feature {0}.{1} of {2}v{3}", containingClass.getName(), feature.getName(),
- revision.getID(), revision.getVersion());
+ TRACER.format("Reading list values for feature {0}.{1} of {2}v{3}", getContainingClass().getName(), getFeature()
+ .getName(), revision.getID(), revision.getVersion());
}
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
PreparedStatement pstmt = null;
ResultSet resultSet = null;
try
{
String sql = sqlSelectChunksPrefix + sqlOrderByIndex;
-
- pstmt = accessor.getStatementCache().getPreparedStatement(sql, ReuseProbability.HIGH);
-
+ pstmt = statementCache.getPreparedStatement(sql, ReuseProbability.HIGH);
setKeyFields(pstmt, revision);
- // if (TRACER.isEnabled())
- // {
- // TRACER.trace(pstmt.toString());
- // }
-
if (listChunk != CDORevision.UNCHUNKED)
{
pstmt.setMaxRows(listChunk); // optimization - don't read unneeded rows.
}
resultSet = pstmt.executeQuery();
-
while ((listChunk == CDORevision.UNCHUNKED || --listChunk >= 0) && resultSet.next())
{
Long tag = resultSet.getLong(1);
@@ -388,13 +355,13 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
finally
{
DBUtil.close(resultSet);
- accessor.getStatementCache().releasePreparedStatement(pstmt);
+ statementCache.releasePreparedStatement(pstmt);
}
if (TRACER.isEnabled())
{
- TRACER.format("Reading list values done for feature {0}.{1} of {2}v{3}", containingClass.getName(), feature //$NON-NLS-1$
- .getName(), revision.getID(), revision.getVersion());
+ TRACER.format("Reading list values done for feature {0}.{1} of {2}v{3}", getContainingClass().getName(),
+ getFeature().getName(), revision.getID(), revision.getVersion());
}
}
@@ -402,7 +369,7 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
{
EStructuralFeature modelFeature = getFeatureByTag(tag);
- TypeMapping typeMapping = (TypeMapping)mappingStrategy.createValueMapping(modelFeature);
+ TypeMapping typeMapping = (TypeMapping)getMappingStrategy().createValueMapping(modelFeature);
String column = CDODBSchema.FEATUREMAP_VALUE + "_" + typeMapping.getDBType();
tagMap.put(tag, column);
@@ -412,7 +379,7 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
/**
* Return the last (maximum) list index. (equals size-1)
- *
+ *
* @param accessor
* the accessor to use
* @param revision
@@ -421,22 +388,16 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
*/
private int getListLastIndex(IDBStoreAccessor accessor, InternalCDORevision revision)
{
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
PreparedStatement pstmt = null;
ResultSet resultSet = null;
try
{
- pstmt = accessor.getStatementCache().getPreparedStatement(sqlGetListLastIndex, ReuseProbability.HIGH);
-
+ pstmt = statementCache.getPreparedStatement(sqlGetListLastIndex, ReuseProbability.HIGH);
setKeyFields(pstmt, revision);
- // if (TRACER.isEnabled())
- // {
- // TRACER.trace(pstmt.toString());
- // }
-
resultSet = pstmt.executeQuery();
-
if (!resultSet.next())
{
if (TRACER.isEnabled())
@@ -464,7 +425,7 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
finally
{
DBUtil.close(resultSet);
- accessor.getStatementCache().releasePreparedStatement(pstmt);
+ statementCache.releasePreparedStatement(pstmt);
}
}
@@ -472,10 +433,11 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
{
if (TRACER.isEnabled())
{
- TRACER.format("Reading list chunk values for feature {0}.{1} of {2}v{3}", containingClass.getName(), feature //$NON-NLS-1$
- .getName(), chunkReader.getRevision().getID(), chunkReader.getRevision().getVersion());
+ TRACER.format("Reading list chunk values for feature {0}.{1} of {2}v{3}", getContainingClass().getName(),
+ getFeature().getName(), chunkReader.getRevision().getID(), chunkReader.getRevision().getVersion());
}
+ IPreparedStatementCache statementCache = chunkReader.getAccessor().getStatementCache();
PreparedStatement pstmt = null;
ResultSet resultSet = null;
@@ -491,7 +453,7 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
builder.append(sqlOrderByIndex);
String sql = builder.toString();
- pstmt = chunkReader.getAccessor().getStatementCache().getPreparedStatement(sql, ReuseProbability.LOW);
+ pstmt = statementCache.getPreparedStatement(sql, ReuseProbability.LOW);
setKeyFields(pstmt, chunkReader.getRevision());
resultSet = pstmt.executeQuery();
@@ -538,8 +500,8 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
if (TRACER.isEnabled())
{
- TRACER.format("Reading list chunk values done for feature {0}.{1} of {2}v{3}", containingClass.getName(),
- getTagByFeature(feature), chunkReader.getRevision().getID(), chunkReader.getRevision().getVersion());
+ TRACER.format("Reading list chunk values done for feature {0}.{1} of {2}v{3}", getContainingClass().getName(),
+ getTagByFeature(getFeature()), chunkReader.getRevision().getID(), chunkReader.getRevision().getVersion());
}
}
catch (SQLException ex)
@@ -549,7 +511,7 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
finally
{
DBUtil.close(resultSet);
- chunkReader.getAccessor().getStatementCache().releasePreparedStatement(pstmt);
+ statementCache.releasePreparedStatement(pstmt);
}
}
@@ -566,13 +528,14 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
protected final void writeValue(IDBStoreAccessor accessor, CDORevision revision, int idx, Object value)
{
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
PreparedStatement stmt = null;
if (TRACER.isEnabled())
{
TRACER
.format(
- "Writing value for feature {0}.{1} index {2} of {3}v{4} : {5}", containingClass.getName(), getTagByFeature(feature), idx, revision.getID(), revision.getVersion(), value); //$NON-NLS-1$
+ "Writing value for feature {0}.{1} index {2} of {3}v{4} : {5}", getContainingClass().getName(), getTagByFeature(getFeature()), idx, revision.getID(), revision.getVersion(), value); //$NON-NLS-1$
}
try
@@ -583,9 +546,7 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
String column = getColumnName(tag);
String sql = sqlInsert;
-
- stmt = accessor.getStatementCache().getPreparedStatement(sql, ReuseProbability.HIGH);
-
+ stmt = statementCache.getPreparedStatement(sql, ReuseProbability.HIGH);
setKeyFields(stmt, revision);
int stmtIndex = getKeyFields().length + 1;
@@ -603,7 +564,6 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
stmt.setInt(stmtIndex++, idx);
stmt.setLong(stmtIndex++, tag);
-
CDODBUtil.sqlUpdate(stmt, true);
}
catch (SQLException e)
@@ -612,18 +572,17 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
}
finally
{
- accessor.getStatementCache().releasePreparedStatement(stmt);
+ statementCache.releasePreparedStatement(stmt);
}
}
/**
* Get column name (lazy)
- *
+ *
* @param tag
* The feature's MetaID in CDO
* @return the column name where the values are stored
*/
-
protected String getColumnName(Long tag)
{
String column = tagMap.get(tag);
@@ -638,12 +597,11 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
/**
* Get type mapping (lazy)
- *
+ *
* @param tag
* The feature's MetaID in CDO
* @return the corresponding type mapping
*/
-
protected ITypeMapping getTypeMapping(Long tag)
{
ITypeMapping typeMapping = typeMappings.get(tag);
@@ -660,10 +618,9 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
* @param metaID
* @return the column name where the values are stored
*/
-
private EStructuralFeature getFeatureByTag(Long tag)
{
- return (EStructuralFeature)mappingStrategy.getStore().getMetaDataManager().getMetaInstance(tag);
+ return (EStructuralFeature)getMappingStrategy().getStore().getMetaDataManager().getMetaInstance(tag);
}
/**
@@ -671,21 +628,15 @@ public abstract class AbstractFeatureMapTableMapping implements IListMapping
* The EStructuralFeature
* @return The feature's MetaID in CDO
*/
-
protected Long getTagByFeature(EStructuralFeature feature)
{
- return mappingStrategy.getStore().getMetaDataManager().getMetaID(feature);
+ return getMappingStrategy().getStore().getMetaDataManager().getMetaID(feature);
}
- /**
- * @param metaID
- * The feature's MetaID in CDO
- * @return the column name where the values are stored
- */
/**
* Used by subclasses to indicate which fields should be in the table. I.e. just a pair of name and DBType ...
- *
+ *
* @author Stefan Winkler
*/
protected static class FieldInfo
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractHorizontalClassMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractHorizontalClassMapping.java
index d7d43ef58f..847751acb2 100644
--- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractHorizontalClassMapping.java
+++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractHorizontalClassMapping.java
@@ -350,7 +350,7 @@ public abstract class AbstractHorizontalClassMapping implements IClassMapping
return tables;
}
- private void checkDuplicateResources(IDBStoreAccessor accessor, CDORevision revision) throws IllegalStateException
+ protected void checkDuplicateResources(IDBStoreAccessor accessor, CDORevision revision) throws IllegalStateException
{
CDOID folderID = (CDOID)revision.data().getContainerID();
String name = (String)revision.data().get(EresourcePackage.eINSTANCE.getCDOResourceNode_Name(), 0);
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractListTableMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractListTableMapping.java
index b7efc61f70..54cddd7266 100644
--- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractListTableMapping.java
+++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractListTableMapping.java
@@ -18,8 +18,8 @@ import org.eclipse.emf.cdo.server.IStoreChunkReader.Chunk;
import org.eclipse.emf.cdo.server.db.CDODBUtil;
import org.eclipse.emf.cdo.server.db.IDBStoreAccessor;
import org.eclipse.emf.cdo.server.db.IDBStoreChunkReader;
+import org.eclipse.emf.cdo.server.db.IPreparedStatementCache;
import org.eclipse.emf.cdo.server.db.IPreparedStatementCache.ReuseProbability;
-import org.eclipse.emf.cdo.server.db.mapping.IListMapping;
import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy;
import org.eclipse.emf.cdo.server.db.mapping.ITypeMapping;
import org.eclipse.emf.cdo.server.internal.db.CDODBSchema;
@@ -52,16 +52,11 @@ import java.util.List;
* @author Eike Stepper
* @since 2.0
*/
-public abstract class AbstractListTableMapping implements IListMapping
+public abstract class AbstractListTableMapping extends BasicAbstractListTableMapping
{
private static final ContextTracer TRACER = new ContextTracer(OM.DEBUG, AbstractListTableMapping.class);
/**
- * The feature for this mapping.
- */
- private EStructuralFeature feature;
-
- /**
* The table of this mapping.
*/
private IDBTable table;
@@ -71,11 +66,6 @@ public abstract class AbstractListTableMapping implements IListMapping
*/
private ITypeMapping typeMapping;
- /**
- * The associated mapping strategy.
- */
- private IMappingStrategy mappingStrategy;
-
// --------- SQL strings - see initSqlStrings() -----------------
private String sqlSelectChunksPrefix;
@@ -83,23 +73,19 @@ public abstract class AbstractListTableMapping implements IListMapping
private String sqlInsertEntry;
- private EClass containingClass;
-
private String sqlGetListLastIndex;
public AbstractListTableMapping(IMappingStrategy mappingStrategy, EClass eClass, EStructuralFeature feature)
{
- this.mappingStrategy = mappingStrategy;
- this.feature = feature;
- containingClass = eClass;
-
+ super(mappingStrategy, eClass, feature);
initTable();
initSqlStrings();
}
private void initTable()
{
- String tableName = mappingStrategy.getTableName(containingClass, feature);
+ IMappingStrategy mappingStrategy = getMappingStrategy();
+ String tableName = mappingStrategy.getTableName(getContainingClass(), getFeature());
table = mappingStrategy.getStore().getDBSchema().addTable(tableName);
// add fields for keys (cdo_id, version, feature_id)
@@ -115,7 +101,7 @@ public abstract class AbstractListTableMapping implements IListMapping
dbFields[dbFields.length - 1] = table.addField(CDODBSchema.LIST_IDX, DBType.INTEGER);
// add field for value
- typeMapping = mappingStrategy.createValueMapping(feature);
+ typeMapping = mappingStrategy.createValueMapping(getFeature());
typeMapping.createDBField(table, CDODBSchema.LIST_VALUE);
// add table indexes
@@ -212,16 +198,6 @@ public abstract class AbstractListTableMapping implements IListMapping
sqlInsertEntry = builder.toString();
}
- public final EStructuralFeature getFeature()
- {
- return feature;
- }
-
- public final EClass getContainingClass()
- {
- return containingClass;
- }
-
protected final IDBTable getTable()
{
return table;
@@ -254,33 +230,26 @@ public abstract class AbstractListTableMapping implements IListMapping
if (TRACER.isEnabled())
{
- TRACER.format("Reading list values for feature {0}.{1} of {2}v{3}", containingClass.getName(), feature.getName(), //$NON-NLS-1$
- revision.getID(), revision.getVersion());
+ TRACER.format("Reading list values for feature {0}.{1} of {2}v{3}", getContainingClass().getName(), //$NON-NLS-1$
+ getFeature().getName(), revision.getID(), revision.getVersion());
}
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
PreparedStatement pstmt = null;
ResultSet resultSet = null;
try
{
String sql = sqlSelectChunksPrefix + sqlOrderByIndex;
-
- pstmt = accessor.getStatementCache().getPreparedStatement(sql, ReuseProbability.HIGH);
-
+ pstmt = statementCache.getPreparedStatement(sql, ReuseProbability.HIGH);
setKeyFields(pstmt, revision);
- // if (TRACER.isEnabled())
- // {
- // TRACER.trace(pstmt.toString());
- // }
-
if (listChunk != CDORevision.UNCHUNKED)
{
pstmt.setMaxRows(listChunk); // optimization - don't read unneeded rows.
}
resultSet = pstmt.executeQuery();
-
while ((listChunk == CDORevision.UNCHUNKED || --listChunk >= 0) && resultSet.next())
{
Object value = typeMapping.readValue(resultSet);
@@ -309,13 +278,13 @@ public abstract class AbstractListTableMapping implements IListMapping
finally
{
DBUtil.close(resultSet);
- accessor.getStatementCache().releasePreparedStatement(pstmt);
+ statementCache.releasePreparedStatement(pstmt);
}
if (TRACER.isEnabled())
{
- TRACER.format("Reading list values done for feature {0}.{1} of {2}v{3}", containingClass.getName(), feature //$NON-NLS-1$
- .getName(), revision.getID(), revision.getVersion());
+ TRACER.format("Reading list values done for feature {0}.{1} of {2}v{3}", getContainingClass().getName(), //$NON-NLS-1$
+ getFeature().getName(), revision.getID(), revision.getVersion());
}
}
@@ -330,22 +299,16 @@ public abstract class AbstractListTableMapping implements IListMapping
*/
private int getListLastIndex(IDBStoreAccessor accessor, InternalCDORevision revision)
{
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
PreparedStatement pstmt = null;
ResultSet resultSet = null;
try
{
- pstmt = accessor.getStatementCache().getPreparedStatement(sqlGetListLastIndex, ReuseProbability.HIGH);
-
+ pstmt = statementCache.getPreparedStatement(sqlGetListLastIndex, ReuseProbability.HIGH);
setKeyFields(pstmt, revision);
- // if (TRACER.isEnabled())
- // {
- // TRACER.trace(pstmt.toString());
- // }
-
resultSet = pstmt.executeQuery();
-
if (!resultSet.next())
{
if (TRACER.isEnabled())
@@ -373,7 +336,7 @@ public abstract class AbstractListTableMapping implements IListMapping
finally
{
DBUtil.close(resultSet);
- accessor.getStatementCache().releasePreparedStatement(pstmt);
+ statementCache.releasePreparedStatement(pstmt);
}
}
@@ -381,10 +344,11 @@ public abstract class AbstractListTableMapping implements IListMapping
{
if (TRACER.isEnabled())
{
- TRACER.format("Reading list chunk values for feature {0}.{1} of {2}v{3}", containingClass.getName(), feature //$NON-NLS-1$
- .getName(), chunkReader.getRevision().getID(), chunkReader.getRevision().getVersion());
+ TRACER.format("Reading list chunk values for feature {0}.{1} of {2}v{3}", getContainingClass().getName(), //$NON-NLS-1$
+ getFeature().getName(), chunkReader.getRevision().getID(), chunkReader.getRevision().getVersion());
}
+ IPreparedStatementCache statementCache = chunkReader.getAccessor().getStatementCache();
PreparedStatement pstmt = null;
ResultSet resultSet = null;
@@ -400,7 +364,7 @@ public abstract class AbstractListTableMapping implements IListMapping
builder.append(sqlOrderByIndex);
String sql = builder.toString();
- pstmt = chunkReader.getAccessor().getStatementCache().getPreparedStatement(sql, ReuseProbability.LOW);
+ pstmt = statementCache.getPreparedStatement(sql, ReuseProbability.LOW);
setKeyFields(pstmt, chunkReader.getRevision());
resultSet = pstmt.executeQuery();
@@ -446,8 +410,8 @@ public abstract class AbstractListTableMapping implements IListMapping
if (TRACER.isEnabled())
{
- TRACER.format("Reading list chunk values done for feature {0}.{1} of {2}v{3}", containingClass.getName(), //$NON-NLS-1$
- feature.getName(), chunkReader.getRevision().getID(), chunkReader.getRevision().getVersion());
+ TRACER.format("Reading list chunk values done for feature {0}.{1} of {2}v{3}", getContainingClass().getName(), //$NON-NLS-1$
+ getFeature().getName(), chunkReader.getRevision().getID(), chunkReader.getRevision().getVersion());
}
}
catch (SQLException ex)
@@ -457,7 +421,7 @@ public abstract class AbstractListTableMapping implements IListMapping
finally
{
DBUtil.close(resultSet);
- chunkReader.getAccessor().getStatementCache().releasePreparedStatement(pstmt);
+ statementCache.releasePreparedStatement(pstmt);
}
}
@@ -474,17 +438,18 @@ public abstract class AbstractListTableMapping implements IListMapping
protected final void writeValue(IDBStoreAccessor accessor, CDORevision revision, int idx, Object value)
{
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
PreparedStatement stmt = null;
if (TRACER.isEnabled())
{
- TRACER.format("Writing value for feature {0}.{1} index {2} of {3}v{4} : {5}", containingClass.getName(), feature //$NON-NLS-1$
- .getName(), idx, revision.getID(), revision.getVersion(), value);
+ TRACER.format("Writing value for feature {0}.{1} index {2} of {3}v{4} : {5}", getContainingClass().getName(),
+ getFeature().getName(), idx, revision.getID(), revision.getVersion(), value);
}
try
{
- stmt = accessor.getStatementCache().getPreparedStatement(sqlInsertEntry, ReuseProbability.HIGH);
+ stmt = statementCache.getPreparedStatement(sqlInsertEntry, ReuseProbability.HIGH);
setKeyFields(stmt, revision);
int stmtIndex = getKeyFields().length + 1;
@@ -499,7 +464,7 @@ public abstract class AbstractListTableMapping implements IListMapping
}
finally
{
- accessor.getStatementCache().releasePreparedStatement(stmt);
+ statementCache.releasePreparedStatement(stmt);
}
}
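The shared pieces removed from AbstractFeatureMapTableMapping and AbstractListTableMapping above (the feature, containing class and mapping strategy fields plus their getters) now live in the new BasicAbstractListTableMapping. That file (52 lines) appears in the diffstat but not in this excerpt; a reconstruction based purely on how the subclasses use it, including the assumption that it implements IListMapping directly, would look roughly like this:

  // Reconstruction for illustration only, not the actual file from the commit.
  public abstract class BasicAbstractListTableMapping implements IListMapping
  {
    private IMappingStrategy mappingStrategy;

    private EClass containingClass;

    private EStructuralFeature feature;

    public BasicAbstractListTableMapping(IMappingStrategy mappingStrategy, EClass containingClass,
        EStructuralFeature feature)
    {
      this.mappingStrategy = mappingStrategy;
      this.containingClass = containingClass;
      this.feature = feature;
    }

    public final IMappingStrategy getMappingStrategy()
    {
      return mappingStrategy;
    }

    public final EClass getContainingClass()
    {
      return containingClass;
    }

    public final EStructuralFeature getFeature()
    {
      return feature;
    }
  }

The new AuditFeatureMapTableMappingWithRanges and AuditListTableMappingWithRanges below extend the same base class, so all four table mappings share one definition of these accessors.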
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditFeatureMapTableMappingWithRanges.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditFeatureMapTableMappingWithRanges.java
new file mode 100644
index 0000000000..d9f0b2b3a8
--- /dev/null
+++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditFeatureMapTableMappingWithRanges.java
@@ -0,0 +1,1259 @@
+/**
+ * Copyright (c) 2004 - 2009 Eike Stepper (Berlin, Germany) and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Eike Stepper - initial API and implementation
+ * Stefan Winkler - Bug 271444: [DB] Multiple refactorings bug 271444
+ * Christopher Albert - Bug 254455: [DB] Support FeatureMaps bug 254455
+ * Victor Roldan Betancort - Bug 283998: [DB] Chunk reading for multiple chunks fails
+ * Lothar Werzinger - Bug 296440: [DB] Change RDB schema to improve scalability of to-many references in audit mode
+ * Stefan Winkler - cleanup, merge and maintenance *
+ */
+package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal;
+
+import org.eclipse.emf.cdo.common.id.CDOID;
+import org.eclipse.emf.cdo.common.id.CDOIDUtil;
+import org.eclipse.emf.cdo.common.revision.CDOList;
+import org.eclipse.emf.cdo.common.revision.CDORevision;
+import org.eclipse.emf.cdo.common.revision.CDORevisionUtil;
+import org.eclipse.emf.cdo.common.revision.delta.CDOAddFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOClearFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOContainerFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOFeatureDeltaVisitor;
+import org.eclipse.emf.cdo.common.revision.delta.CDOListFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOMoveFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDORemoveFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOSetFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOUnsetFeatureDelta;
+import org.eclipse.emf.cdo.server.IStoreChunkReader.Chunk;
+import org.eclipse.emf.cdo.server.db.CDODBUtil;
+import org.eclipse.emf.cdo.server.db.IDBStoreAccessor;
+import org.eclipse.emf.cdo.server.db.IDBStoreChunkReader;
+import org.eclipse.emf.cdo.server.db.IPreparedStatementCache;
+import org.eclipse.emf.cdo.server.db.IPreparedStatementCache.ReuseProbability;
+import org.eclipse.emf.cdo.server.db.mapping.IListMappingDeltaSupport;
+import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy;
+import org.eclipse.emf.cdo.server.db.mapping.ITypeMapping;
+import org.eclipse.emf.cdo.server.internal.db.CDODBSchema;
+import org.eclipse.emf.cdo.server.internal.db.bundle.OM;
+import org.eclipse.emf.cdo.server.internal.db.mapping.TypeMapping;
+import org.eclipse.emf.cdo.server.internal.db.mapping.TypeMappingFactory;
+import org.eclipse.emf.cdo.spi.common.revision.InternalCDOList;
+import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevision;
+
+import org.eclipse.net4j.db.DBException;
+import org.eclipse.net4j.db.DBType;
+import org.eclipse.net4j.db.DBUtil;
+import org.eclipse.net4j.db.ddl.IDBField;
+import org.eclipse.net4j.db.ddl.IDBTable;
+import org.eclipse.net4j.db.ddl.IDBIndex.Type;
+import org.eclipse.net4j.util.ImplementationError;
+import org.eclipse.net4j.util.collection.MoveableList;
+import org.eclipse.net4j.util.om.trace.ContextTracer;
+
+import org.eclipse.emf.ecore.EClass;
+import org.eclipse.emf.ecore.EStructuralFeature;
+import org.eclipse.emf.ecore.util.FeatureMap;
+
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * This is a featuremap-table mapping for audit mode. It is optimized for frequent insert operations at the list's end,
+ * which causes just 1 DB row to be changed. This is achieved by introducing a version range (columns
+ * {@link CDODBSchema#LIST_REVISION_VERSION_ADDED cdo_version_added} and
+ * {@link CDODBSchema#LIST_REVISION_VERSION_REMOVED cdo_version_removed}) which records for which revisions a particular
+ * entry existed. Also, this mapping is mainly optimized for potentially very large lists: the need for having the
+ * complete list stored in memory to do in-the-middle moves and inserts is traded for a few more DB access
+ * operations.
+ *
+ * @author Eike Stepper
+ * @author Stefan Winkler
+ * @author Lothar Werzinger
+ * @since 3.0
+ */
+public class AuditFeatureMapTableMappingWithRanges extends BasicAbstractListTableMapping implements
+ IListMappingDeltaSupport
+{
+ private static final ContextTracer TRACER = new ContextTracer(OM.DEBUG, AuditFeatureMapTableMappingWithRanges.class);
+
+ /**
+ * Used to clean up lists for detached objects.
+ */
+ private static final int FINAL_VERSION = Integer.MAX_VALUE;
+
+ /**
+ * The table of this mapping.
+ */
+ private IDBTable table;
+
+ /**
+ * The tags mapped to column names
+ */
+ private HashMap<Long, String> tagMap;
+
+ /**
+ * Column name Set
+ */
+ private List<String> columnNames;
+
+ /**
+ * The type mappings for the value fields.
+ */
+ private Map<Long, ITypeMapping> typeMappings;
+
+ // --------- SQL strings - see initSqlStrings() -----------------
+ private String sqlSelectChunksPrefix;
+
+ private String sqlOrderByIndex;
+
+ protected String sqlInsert;
+
+ private String sqlGetListLastIndex;
+
+ private List<DBType> dbTypes;
+
+ private String sqlRemoveEntry;
+
+ private String sqlDeleteEntry;
+
+ private String sqlUpdateIndex;
+
+ private String sqlGetValue;
+
+ private String sqlClearList;
+
+ private String sqlDeleteList;
+
+ public AuditFeatureMapTableMappingWithRanges(IMappingStrategy mappingStrategy, EClass eClass,
+ EStructuralFeature feature)
+ {
+ super(mappingStrategy, eClass, feature);
+ initDBTypes();
+ initTable();
+ initSqlStrings();
+ }
+
+ private void initDBTypes()
+ {
+ // TODO add annotation processing here ...
+ dbTypes = new ArrayList<DBType>(TypeMappingFactory.getDefaultFeatureMapDBTypes());
+ }
+
+ private void initTable()
+ {
+ String tableName = getMappingStrategy().getTableName(getContainingClass(), getFeature());
+ table = getMappingStrategy().getStore().getDBSchema().addTable(tableName);
+
+ // add fields for CDOID
+ IDBField idField = table.addField(CDODBSchema.FEATUREMAP_REVISION_ID, DBType.INTEGER);
+
+ // add fields for version range
+ IDBField versionAddedField = table.addField(CDODBSchema.FEATUREMAP_VERSION_ADDED, DBType.INTEGER);
+ IDBField versionRemovedField = table.addField(CDODBSchema.FEATUREMAP_VERSION_REMOVED, DBType.INTEGER);
+
+ // add field for list index
+ IDBField idxField = table.addField(CDODBSchema.FEATUREMAP_IDX, DBType.INTEGER);
+
+ // add field for FeatureMap tag (MetaID for Feature in CDO registry)
+ IDBField tagField = table.addField(CDODBSchema.FEATUREMAP_TAG, DBType.INTEGER);
+
+ tagMap = new HashMap<Long, String>();
+ typeMappings = new HashMap<Long, ITypeMapping>();
+ columnNames = new ArrayList<String>();
+
+ // create columns for all DBTypes
+ for (DBType type : getDBTypes())
+ {
+ String column = CDODBSchema.FEATUREMAP_VALUE + "_" + type.name();
+ table.addField(column, type);
+ columnNames.add(column);
+ }
+
+ // TODO think about indices
+ table.addIndex(Type.NON_UNIQUE, idField);
+ table.addIndex(Type.NON_UNIQUE, versionAddedField);
+ table.addIndex(Type.NON_UNIQUE, versionRemovedField);
+ table.addIndex(Type.NON_UNIQUE, idxField);
+ table.addIndex(Type.NON_UNIQUE, tagField);
+ }
+
+ public Collection<IDBTable> getDBTables()
+ {
+ return Arrays.asList(table);
+ }
+
+ private void initSqlStrings()
+ {
+ String tableName = getTable().getName();
+
+ // ---------------- SELECT to read chunks ----------------------------
+ StringBuilder builder = new StringBuilder();
+ builder.append("SELECT ");
+
+ builder.append(CDODBSchema.FEATUREMAP_TAG);
+ builder.append(", ");
+
+ Iterator<String> iter = columnNames.iterator();
+ while (iter.hasNext())
+ {
+ builder.append(iter.next());
+ if (iter.hasNext())
+ {
+ builder.append(", ");
+ }
+ }
+
+ builder.append(" FROM ");
+ builder.append(tableName);
+ builder.append(" WHERE ");
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_ADDED);
+ builder.append(" <= ? AND ( "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" IS NULL OR "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" > ? )"); //$NON-NLS-1$
+ sqlSelectChunksPrefix = builder.toString();
+
+ sqlOrderByIndex = " ORDER BY " + CDODBSchema.FEATUREMAP_IDX; //$NON-NLS-1$
+
+ // ----------------- count list size --------------------------
+
+ builder = new StringBuilder("SELECT count(1) FROM ");
+ builder.append(tableName);
+ builder.append(" WHERE ");
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_ADDED);
+ builder.append(" <= ? AND ( "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" IS NULL OR "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" > ? )"); //$NON-NLS-1$
+ sqlGetListLastIndex = builder.toString();
+
+ // ----------------- INSERT - prefix -----------------
+ builder = new StringBuilder("INSERT INTO ");
+ builder.append(tableName);
+ builder.append("(");
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append(",");
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_ADDED);
+ builder.append(",");
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(",");
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append(",");
+ builder.append(CDODBSchema.LIST_VALUE);
+
+ for (int i = 0; i < columnNames.size(); i++)
+ {
+ builder.append(columnNames.get(i));
+ builder.append(", "); //$NON-NLS-1$
+ }
+
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append(", "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_TAG);
+ builder.append(") VALUES (?, ?, ?, ?, ?, "); //$NON-NLS-1$
+ for (int i = 0; i < columnNames.size(); i++)
+ {
+ builder.append("?, ");
+ }
+
+ builder.append("?, ?)");
+ sqlInsert = builder.toString();
+
+ // ----------------- remove current entry -----------------
+ builder = new StringBuilder("UPDATE "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" SET "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" = ? "); //$NON-NLS-1$
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlRemoveEntry = builder.toString();
+
+ // ----------------- delete temporary entry -----------------
+ builder = new StringBuilder("DELETE FROM "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_ADDED);
+ builder.append(" = ?"); //$NON-NLS-1$
+ sqlDeleteEntry = builder.toString();
+
+ // ----------------- update index -----------------
+ builder = new StringBuilder("UPDATE "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" SET "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append(" = ? WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_ADDED);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append(" = ?"); //$NON-NLS-1$
+ sqlUpdateIndex = builder.toString();
+
+ // ----------------- get current value -----------------
+ builder = new StringBuilder("SELECT "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_TAG);
+ builder.append(", ");
+
+ iter = columnNames.iterator();
+ while (iter.hasNext())
+ {
+ builder.append(iter.next());
+ if (iter.hasNext())
+ {
+ builder.append(", ");
+ }
+ }
+
+ builder.append(" FROM "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlGetValue = builder.toString();
+
+ // ----------- clear list items -------------------------
+ builder = new StringBuilder("UPDATE "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" SET "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" = ? "); //$NON-NLS-1$
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlClearList = builder.toString();
+
+ // ----------- delete temporary list items -------------------------
+ builder = new StringBuilder("DELETE FROM "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_ADDED);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlDeleteList = builder.toString();
+ }
+
+ protected List<DBType> getDBTypes()
+ {
+ return dbTypes;
+ }
+
+ protected final IDBTable getTable()
+ {
+ return table;
+ }
+
+ protected final List<String> getColumnNames()
+ {
+ return columnNames;
+ }
+
+ protected final Map<Long, ITypeMapping> getTypeMappings()
+ {
+ return typeMappings;
+ }
+
+ protected final Map<Long, String> getTagMap()
+ {
+ return tagMap;
+ }
+
+ public void readValues(IDBStoreAccessor accessor, InternalCDORevision revision, int listChunk)
+ {
+ MoveableList<Object> list = revision.getList(getFeature());
+ int listSize = -1;
+
+ if (listChunk != CDORevision.UNCHUNKED)
+ {
+ listSize = getListLastIndex(accessor, revision);
+ if (listSize == -1)
+ {
+ // list is empty - take shortcut
+ return;
+ }
+
+ // subtract amount of items we are going to read now
+ listSize -= listChunk;
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading list values for feature {0}.{1} of {2}v{3}", getContainingClass().getName(), getFeature()
+ .getName(), revision.getID(), revision.getVersion());
+ }
+
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+ ResultSet resultSet = null;
+
+ try
+ {
+ String sql = sqlSelectChunksPrefix + sqlOrderByIndex;
+
+ pstmt = statementCache.getPreparedStatement(sql, ReuseProbability.HIGH);
+
+ pstmt.setLong(1, CDOIDUtil.getLong(revision.getID()));
+ pstmt.setInt(2, revision.getVersion());
+ pstmt.setInt(3, revision.getVersion());
+
+ if (listChunk != CDORevision.UNCHUNKED)
+ {
+ pstmt.setMaxRows(listChunk); // optimization - don't read unneeded rows.
+ }
+
+ resultSet = pstmt.executeQuery();
+ while ((listChunk == CDORevision.UNCHUNKED || --listChunk >= 0) && resultSet.next())
+ {
+ Long tag = resultSet.getLong(1);
+ Object value = getTypeMapping(tag).readValue(resultSet);
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Read value for index {0} from result set: {1}", list.size(), value);
+ }
+
+ list.add(CDORevisionUtil.createFeatureMapEntry(getFeatureByTag(tag), value));
+ }
+
+ while (listSize-- >= 0)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Adding UNINITIALIZED for index {0} ", list.size());
+ }
+
+ list.add(InternalCDOList.UNINITIALIZED);
+ }
+ }
+ catch (SQLException ex)
+ {
+ throw new DBException(ex);
+ }
+ finally
+ {
+ DBUtil.close(resultSet);
+ statementCache.releasePreparedStatement(pstmt);
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading list values done for feature {0}.{1} of {2}v{3}", getContainingClass().getName(),
+ getFeature().getName(), revision.getID(), revision.getVersion());
+ }
+ }
+
+ private void addFeature(Long tag)
+ {
+ EStructuralFeature modelFeature = getFeatureByTag(tag);
+
+ TypeMapping typeMapping = (TypeMapping)getMappingStrategy().createValueMapping(modelFeature);
+ String column = CDODBSchema.FEATUREMAP_VALUE + "_" + typeMapping.getDBType();
+
+ tagMap.put(tag, column);
+ typeMapping.setDBField(table, column);
+ typeMappings.put(tag, typeMapping);
+ }
+
+ /**
+ * Return the last (maximum) list index. (equals size-1)
+ *
+ * @param accessor
+ * the accessor to use
+ * @param revision
+ * the revision to which the feature list belongs
+ * @return the last index or <code>-1</code> if the list is empty.
+ */
+ private int getListLastIndex(IDBStoreAccessor accessor, InternalCDORevision revision)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+ ResultSet resultSet = null;
+
+ try
+ {
+ pstmt = statementCache.getPreparedStatement(sqlGetListLastIndex, ReuseProbability.HIGH);
+
+ pstmt.setLong(1, CDOIDUtil.getLong(revision.getID()));
+ pstmt.setInt(2, revision.getVersion());
+ pstmt.setInt(3, revision.getVersion());
+
+ resultSet = pstmt.executeQuery();
+ if (!resultSet.next())
+ {
+ throw new DBException("count expects exactly one result.");
+ }
+ else
+ {
+ int result = resultSet.getInt(1) - 1;
+ if (TRACER.isEnabled())
+ {
+ TRACER.trace("Read list last index = " + result);
+ }
+
+ return result;
+ }
+ }
+ catch (SQLException ex)
+ {
+ throw new DBException(ex);
+ }
+ finally
+ {
+ DBUtil.close(resultSet);
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+
+ public final void readChunks(IDBStoreChunkReader chunkReader, List<Chunk> chunks, String where)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading list chunk values for feature {0}.{1} of {2}v{3}", getContainingClass().getName(),
+ getFeature().getName(), chunkReader.getRevision().getID(), chunkReader.getRevision().getVersion());
+ }
+
+ IPreparedStatementCache statementCache = chunkReader.getAccessor().getStatementCache();
+ PreparedStatement pstmt = null;
+ ResultSet resultSet = null;
+
+ try
+ {
+ StringBuilder builder = new StringBuilder(sqlSelectChunksPrefix);
+ if (where != null)
+ {
+ builder.append(" AND "); //$NON-NLS-1$
+ builder.append(where);
+ }
+
+ builder.append(sqlOrderByIndex);
+
+ String sql = builder.toString();
+ pstmt = statementCache.getPreparedStatement(sql, ReuseProbability.LOW);
+ pstmt.setLong(1, CDOIDUtil.getLong(chunkReader.getRevision().getID()));
+ pstmt.setInt(2, chunkReader.getRevision().getVersion());
+ pstmt.setInt(3, chunkReader.getRevision().getVersion());
+
+ resultSet = pstmt.executeQuery();
+
+ Chunk chunk = null;
+ int chunkSize = 0;
+ int chunkIndex = 0;
+ int indexInChunk = 0;
+
+ while (resultSet.next())
+ {
+ Long tag = resultSet.getLong(1);
+ Object value = getTypeMapping(tag).readValue(resultSet);
+
+ if (chunk == null)
+ {
+ chunk = chunks.get(chunkIndex++);
+ chunkSize = chunk.size();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Current chunk no. {0} is [start = {1}, size = {2}]", chunkIndex - 1, chunk.getStartIndex(),
+ chunkSize);
+ }
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Read value for chunk index {0} from result set: {1}", indexInChunk, value);
+ }
+
+ chunk.add(indexInChunk++, CDORevisionUtil.createFeatureMapEntry(getFeatureByTag(tag), value));
+ if (indexInChunk == chunkSize)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Chunk finished.");
+ }
+
+ chunk = null;
+ indexInChunk = 0;
+ }
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading list chunk values done for feature {0}.{1} of {2}v{3}", getContainingClass().getName(),
+ getTagByFeature(getFeature()), chunkReader.getRevision().getID(), chunkReader.getRevision().getVersion());
+ }
+ }
+ catch (SQLException ex)
+ {
+ throw new DBException(ex);
+ }
+ finally
+ {
+ DBUtil.close(resultSet);
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+
+ public void writeValues(IDBStoreAccessor accessor, InternalCDORevision revision)
+ {
+ CDOList values = revision.getList(getFeature());
+
+ int idx = 0;
+ for (Object element : values)
+ {
+ writeValue(accessor, revision, idx++, element);
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Writing done");
+ }
+ }
+
+ protected final void writeValue(IDBStoreAccessor accessor, CDORevision revision, int idx, Object value)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER
+ .format(
+ "Writing value for feature {0}.{1} index {2} of {3}v{4} : {5}", getContainingClass().getName(), getTagByFeature(getFeature()), idx, revision.getID(), revision.getVersion(), value); //$NON-NLS-1$
+ }
+
+ addEntry(accessor, revision.getID(), revision.getVersion(), idx, value);
+ }
+
+ /**
+ * Get column name (lazy).
+ *
+ * @param tag
+ * The feature's MetaID in CDO
+ * @return the column name where the values are stored
+ */
+ protected String getColumnName(Long tag)
+ {
+ String column = tagMap.get(tag);
+ if (column == null)
+ {
+ addFeature(tag);
+ column = tagMap.get(tag);
+ }
+
+ return column;
+ }
+
+ /**
+ * Get type mapping (lazy).
+ *
+ * @param tag
+ * The feature's MetaID in CDO
+ * @return the corresponding type mapping
+ */
+ protected ITypeMapping getTypeMapping(Long tag)
+ {
+ ITypeMapping typeMapping = typeMappings.get(tag);
+ if (typeMapping == null)
+ {
+ addFeature(tag);
+ typeMapping = typeMappings.get(tag);
+ }
+
+ return typeMapping;
+ }
+
+ /**
+ * @param tag
+ *          The feature's MetaID in CDO
+ * @return the EStructuralFeature corresponding to the given tag
+ */
+ private EStructuralFeature getFeatureByTag(Long tag)
+ {
+ return (EStructuralFeature)getMappingStrategy().getStore().getMetaDataManager().getMetaInstance(tag);
+ }
+
+ /**
+ * @param feature
+ * The EStructuralFeature
+ * @return The feature's MetaID in CDO
+ */
+ protected Long getTagByFeature(EStructuralFeature feature)
+ {
+ return getMappingStrategy().getStore().getMetaDataManager().getMetaID(feature);
+ }
+
+ /**
+ * Clear a list of a given revision.
+ *
+ * @param accessor
+ * the accessor to use
+ * @param id
+ * the id of the revision from which to remove all items
+ */
+ public void clearList(IDBStoreAccessor accessor, CDOID id, int oldVersion, int newVersion)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmtDeleteTemp = null;
+ PreparedStatement pstmtClear = null;
+
+ try
+ {
+ // delete temporary entries
+ pstmtDeleteTemp = statementCache.getPreparedStatement(sqlDeleteList, ReuseProbability.HIGH);
+ pstmtDeleteTemp.setLong(1, CDOIDUtil.getLong(id));
+ pstmtDeleteTemp.setInt(2, newVersion);
+
+ int result = CDODBUtil.sqlUpdate(pstmtDeleteTemp, false);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("DeleteList result: {0}", result); //$NON-NLS-1$
+ }
+
+ // clear rest of the list
+ pstmtClear = statementCache.getPreparedStatement(sqlClearList, ReuseProbability.HIGH);
+ pstmtClear.setInt(1, newVersion);
+ pstmtClear.setLong(2, CDOIDUtil.getLong(id));
+
+ result = CDODBUtil.sqlUpdate(pstmtClear, false);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("ClearList result: {0}", result); //$NON-NLS-1$
+ }
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmtDeleteTemp);
+ statementCache.releasePreparedStatement(pstmtClear);
+ }
+ }
+
+ public void objectRevised(IDBStoreAccessor accessor, CDOID id, long revised)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("objectRevised {0} {1}", id, revised);
+ }
+
+ // get revision from cache to find out version number
+ CDORevision revision = getMappingStrategy().getStore().getRepository().getRevisionManager().getRevision(id, 0,
+ CDORevision.DEPTH_NONE);
+
+ // set cdo_revision_removed for all list items (so we have no NULL values)
+ clearList(accessor, id, revision.getVersion(), FINAL_VERSION);
+ }
+
+ public void processDelta(final IDBStoreAccessor accessor, final CDOID id, int oldVersion, final int newVersion,
+ long created, CDOListFeatureDelta delta)
+ {
+ InternalCDORevision originalRevision = (InternalCDORevision)accessor.getStore().getRepository()
+ .getRevisionManager().getRevision(id, 0, CDORevision.DEPTH_NONE);
+ int oldListSize = originalRevision.getList(getFeature()).size();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("ListTableMapping.processDelta for revision {0} - previous list size: {1}", originalRevision,
+ oldListSize);
+ }
+
+ // let the visitor collect the changes
+ ListDeltaVisitor visitor = new ListDeltaVisitor(accessor, originalRevision, oldVersion, newVersion);
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("processing deltas ...");
+ }
+
+ for (CDOFeatureDelta listDelta : delta.getListChanges())
+ {
+ listDelta.accept(visitor);
+ }
+ }
+
+ private class ListDeltaVisitor implements CDOFeatureDeltaVisitor
+ {
+ private IDBStoreAccessor accessor;
+
+ private InternalCDORevision originalRevision;
+
+ private CDOID id;
+
+ private int oldVersion;
+
+ private int newVersion;
+
+ private int lastIndex;
+
+ public ListDeltaVisitor(IDBStoreAccessor accessor, InternalCDORevision originalRevision, int oldVersion,
+ int newVersion)
+ {
+ this.accessor = accessor;
+ this.originalRevision = originalRevision;
+ id = this.originalRevision.getID();
+ this.oldVersion = oldVersion;
+ this.newVersion = newVersion;
+ lastIndex = originalRevision.getList(getFeature()).size() - 1;
+ }
+
+ public void visit(CDOMoveFeatureDelta delta)
+ {
+ int fromIdx = delta.getOldPosition();
+ int toIdx = delta.getNewPosition();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Moving: {0} to {1}", fromIdx, toIdx); //$NON-NLS-1$
+ }
+
+ Object value = getValue(accessor, id, fromIdx);
+
+ // remove the item
+ removeEntry(accessor, id, oldVersion, newVersion, fromIdx);
+
+ // adjust indexes and shift either up or down
+ if (fromIdx < toIdx)
+ {
+ moveOneUp(accessor, id, oldVersion, newVersion, fromIdx + 1, toIdx);
+ }
+ else
+ { // fromIdx > toIdx here
+ moveOneDown(accessor, id, oldVersion, newVersion, toIdx, fromIdx - 1);
+ }
+
+ // create the item
+ addEntry(accessor, id, newVersion, toIdx, value);
+ }
+
+ public void visit(CDOAddFeatureDelta delta)
+ {
+ int startIndex = delta.getIndex();
+ int endIndex = lastIndex;
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Adding at: {0}", startIndex); //$NON-NLS-1$
+ }
+
+ if (startIndex <= endIndex)
+ {
+ // make room for the new item
+ moveOneDown(accessor, id, oldVersion, newVersion, startIndex, endIndex);
+ }
+
+ // create the item
+ addEntry(accessor, id, newVersion, startIndex, delta.getValue());
+
+ ++lastIndex;
+ }
+
+ public void visit(CDORemoveFeatureDelta delta)
+ {
+ int startIndex = delta.getIndex();
+ int endIndex = lastIndex;
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Removing at: {0}", startIndex); //$NON-NLS-1$
+ }
+
+ // remove the item
+ removeEntry(accessor, id, oldVersion, newVersion, startIndex);
+
+ // make room for the new item
+ moveOneUp(accessor, id, oldVersion, newVersion, startIndex + 1, endIndex);
+
+ --lastIndex;
+ }
+
+ public void visit(CDOSetFeatureDelta delta)
+ {
+ int index = delta.getIndex();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Setting at: {0}", index); //$NON-NLS-1$
+ }
+
+ // remove the item
+ removeEntry(accessor, id, oldVersion, newVersion, index);
+
+ // create the item
+ addEntry(accessor, id, newVersion, index, delta.getValue());
+ }
+
+ public void visit(CDOUnsetFeatureDelta delta)
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ public void visit(CDOListFeatureDelta delta)
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ public void visit(CDOClearFeatureDelta delta)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Clearing"); //$NON-NLS-1$
+ }
+ clearList(accessor, id, oldVersion, newVersion);
+
+ lastIndex = -1;
+ }
+
+ public void visit(CDOContainerFeatureDelta delta)
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ private void moveOneUp(IDBStoreAccessor accessor, CDOID id, int oldVersion, int newVersion, int startIndex,
+ int endIndex)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+
+ try
+ {
+ pstmt = statementCache.getPreparedStatement(sqlUpdateIndex, ReuseProbability.HIGH);
+
+ for (int index = startIndex; index <= endIndex; ++index)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp moving: {0} -> {1}", index, index - 1); //$NON-NLS-1$
+ }
+
+ int stmtIndex = 1;
+ pstmt.setInt(stmtIndex++, index - 1);
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, newVersion);
+ pstmt.setInt(stmtIndex++, index);
+
+ int result = CDODBUtil.sqlUpdate(pstmt, false);
+ switch (result)
+ {
+ case 0:
+ Object value = getValue(accessor, id, index);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp remove: {0}", index); //$NON-NLS-1$
+ }
+
+ removeEntry(accessor, id, oldVersion, newVersion, index);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp add: {0}", index - 1); //$NON-NLS-1$
+ }
+
+ addEntry(accessor, id, newVersion, index - 1, value);
+ break;
+
+ case 1:
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp updated: {0} -> {1}", index, index - 1); //$NON-NLS-1$
+ }
+
+ break;
+
+ default:
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp Too many results: {0} -> {1}: {2}", index, index + 1, result); //$NON-NLS-1$
+ }
+
+ throw new DBException("Too many results"); //$NON-NLS-1$
+ }
+ }
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+
+ private void moveOneDown(IDBStoreAccessor accessor, CDOID id, int oldVersion, int newVersion, int startIndex,
+ int endIndex)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+
+ try
+ {
+ pstmt = statementCache.getPreparedStatement(sqlUpdateIndex, ReuseProbability.HIGH);
+ for (int index = endIndex; index >= startIndex; --index)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown moving: {0} -> {1}", index, index + 1); //$NON-NLS-1$
+ }
+
+ int stmtIndex = 1;
+ pstmt.setInt(stmtIndex++, index + 1);
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, newVersion);
+ pstmt.setInt(stmtIndex++, index);
+
+ int result = CDODBUtil.sqlUpdate(pstmt, false);
+ switch (result)
+ {
+ case 0:
+ Object value = getValue(accessor, id, index);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown remove: {0}", index); //$NON-NLS-1$
+ }
+
+ removeEntry(accessor, id, oldVersion, newVersion, index);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown add: {0}", index + 1); //$NON-NLS-1$
+ }
+
+ addEntry(accessor, id, newVersion, index + 1, value);
+ break;
+
+ case 1:
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown updated: {0} -> {1}", index, index + 1); //$NON-NLS-1$
+ }
+
+ break;
+
+ default:
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown Too many results: {0} -> {1}: {2}", index, index + 1, result); //$NON-NLS-1$
+ }
+
+ throw new DBException("Too many results"); //$NON-NLS-1$
+ }
+ }
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+ }
+
+ private void addEntry(IDBStoreAccessor accessor, CDOID id, int version, int index, Object value)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Adding value for feature() {0}.{1} index {2} of {3}v{4} : {5}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, version, value);
+ }
+
+ try
+ {
+ FeatureMap.Entry entry = (FeatureMap.Entry)value;
+ EStructuralFeature entryFeature = entry.getEStructuralFeature();
+ Long tag = getTagByFeature(entryFeature);
+ String column = getColumnName(tag);
+
+ pstmt = statementCache.getPreparedStatement(sqlInsert, ReuseProbability.HIGH);
+
+ int stmtIndex = 1;
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, version);
+ pstmt.setInt(stmtIndex++, index);
+
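+      // write the entry's value into the column that matches its feature tag; all other feature columns are set to
+      // NULL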
+ for (int i = 0; i < columnNames.size(); i++)
+ {
+ if (columnNames.get(i).equals(column))
+ {
+ getTypeMapping(tag).setValue(pstmt, stmtIndex++, entry.getValue());
+ }
+ else
+ {
+ pstmt.setNull(stmtIndex++, getDBTypes().get(i).getCode());
+ }
+ }
+
+ pstmt.setInt(stmtIndex++, index);
+ pstmt.setLong(stmtIndex++, tag);
+ CDODBUtil.sqlUpdate(pstmt, true);
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ catch (IllegalStateException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+
+ private void removeEntry(IDBStoreAccessor accessor, CDOID id, int oldVersion, int newVersion, int index)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Removing value for feature() {0}.{1} index {2} of {3}v{4}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, newVersion);
+ }
+
+ try
+ {
+ // try to delete a temporary entry first
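+      // an entry that was added in this same new version has no history to preserve and can be deleted physically;
+      // otherwise the fallback below only closes its version range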
+ pstmt = statementCache.getPreparedStatement(sqlDeleteEntry, ReuseProbability.HIGH);
+
+ int stmtIndex = 1;
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, index);
+ pstmt.setInt(stmtIndex++, newVersion);
+
+ int result = CDODBUtil.sqlUpdate(pstmt, false);
+ if (result == 1)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("removeEntry deleted: {0}", index); //$NON-NLS-1$
+ }
+ }
+ else if (result > 1)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("removeEntry Too many results: {0}: {1}", index, result); //$NON-NLS-1$
+ }
+
+ throw new DBException("Too many results"); //$NON-NLS-1$
+ }
+ else
+ {
+ // no temporary entry found, so mark the entry as removed
+ statementCache.releasePreparedStatement(pstmt);
+ pstmt = statementCache.getPreparedStatement(sqlRemoveEntry, ReuseProbability.HIGH);
+
+ stmtIndex = 1;
+ pstmt.setInt(stmtIndex++, newVersion);
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, index);
+ CDODBUtil.sqlUpdate(pstmt, true);
+ }
+ }
+ catch (SQLException e)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Removing value for feature() {0}.{1} index {2} of {3}v{4} FAILED {5}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, newVersion, e.getMessage());
+ }
+
+ throw new DBException(e);
+ }
+ catch (IllegalStateException e)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Removing value for feature() {0}.{1} index {2} of {3}v{4} FAILED {5}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, newVersion, e.getMessage());
+ }
+
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+
+ private FeatureMap.Entry getValue(IDBStoreAccessor accessor, CDOID id, int index)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+ FeatureMap.Entry result = null;
+
+ try
+ {
+ pstmt = statementCache.getPreparedStatement(sqlGetValue, ReuseProbability.HIGH);
+
+ int stmtIndex = 1;
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, index);
+
+ ResultSet resultSet = pstmt.executeQuery();
+ if (!resultSet.next())
+ {
+ throw new DBException("getValue expects exactly one result.");
+ }
+
+ Long tag = resultSet.getLong(1);
+ Object value = getTypeMapping(tag).readValue(resultSet);
+ result = CDORevisionUtil.createFeatureMapEntry(getFeatureByTag(tag), value);
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Read value (index {0}) from result set: {1}", index, result); //$NON-NLS-1$
+ }
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+
+ return result;
+ }
+}
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditListTableMappingWithRanges.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditListTableMappingWithRanges.java
new file mode 100644
index 0000000000..4510fdd24e
--- /dev/null
+++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditListTableMappingWithRanges.java
@@ -0,0 +1,1061 @@
+/**
+ * Copyright (c) 2004 - 2009 Eike Stepper (Berlin, Germany) and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * This class has been derived from AbstractListTableMapping
+ *
+ * Contributors:
+ * Eike Stepper - initial API and implementation
+ * Lothar Werzinger - Bug 296440: [DB] Change RDB schema to improve scalability of to-many references in audit mode
+ * Stefan Winkler - cleanup, merge and maintenance
+ */
+package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal;
+
+import org.eclipse.emf.cdo.common.id.CDOID;
+import org.eclipse.emf.cdo.common.id.CDOIDUtil;
+import org.eclipse.emf.cdo.common.revision.CDOList;
+import org.eclipse.emf.cdo.common.revision.CDORevision;
+import org.eclipse.emf.cdo.common.revision.delta.CDOAddFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOClearFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOContainerFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOFeatureDeltaVisitor;
+import org.eclipse.emf.cdo.common.revision.delta.CDOListFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOMoveFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDORemoveFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOSetFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOUnsetFeatureDelta;
+import org.eclipse.emf.cdo.server.IStoreChunkReader.Chunk;
+import org.eclipse.emf.cdo.server.db.CDODBUtil;
+import org.eclipse.emf.cdo.server.db.IDBStoreAccessor;
+import org.eclipse.emf.cdo.server.db.IDBStoreChunkReader;
+import org.eclipse.emf.cdo.server.db.IPreparedStatementCache;
+import org.eclipse.emf.cdo.server.db.IPreparedStatementCache.ReuseProbability;
+import org.eclipse.emf.cdo.server.db.mapping.IListMappingDeltaSupport;
+import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy;
+import org.eclipse.emf.cdo.server.db.mapping.ITypeMapping;
+import org.eclipse.emf.cdo.server.internal.db.CDODBSchema;
+import org.eclipse.emf.cdo.server.internal.db.bundle.OM;
+import org.eclipse.emf.cdo.spi.common.revision.InternalCDOList;
+import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevision;
+
+import org.eclipse.net4j.db.DBException;
+import org.eclipse.net4j.db.DBType;
+import org.eclipse.net4j.db.DBUtil;
+import org.eclipse.net4j.db.ddl.IDBField;
+import org.eclipse.net4j.db.ddl.IDBTable;
+import org.eclipse.net4j.db.ddl.IDBIndex.Type;
+import org.eclipse.net4j.util.ImplementationError;
+import org.eclipse.net4j.util.collection.MoveableList;
+import org.eclipse.net4j.util.om.trace.ContextTracer;
+
+import org.eclipse.emf.ecore.EClass;
+import org.eclipse.emf.ecore.EStructuralFeature;
+
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * This is a list-table mapping for audit mode. It is optimized for frequent insert operations at the list's end, each
+ * of which changes just one DB row. This is achieved by introducing a version range (columns cdo_version_added and
+ * cdo_version_removed) which records for which revisions a particular entry existed. This mapping is also optimized
+ * for potentially very large lists: the need to have the complete list in memory for in-the-middle moves and inserts
+ * is traded for a few more DB access operations.
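+ *
+ * For example (sketch of the scheme): an entry added in revision version 3 and removed in version 7 is stored with
+ * cdo_version_added = 3 and cdo_version_removed = 7; a read of version v matches entries with cdo_version_added <= v
+ * and (cdo_version_removed IS NULL OR cdo_version_removed > v), as built in initSqlStrings().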
+ *
+ * @author Eike Stepper
+ * @author Stefan Winkler
+ * @author Lothar Werzinger
+ */
+public class AuditListTableMappingWithRanges extends BasicAbstractListTableMapping implements IListMappingDeltaSupport
+{
+ private static final ContextTracer TRACER = new ContextTracer(OM.DEBUG, AuditListTableMappingWithRanges.class);
+
+ /**
+ * Used to clean up lists for detached objects.
+ */
+ private static final int FINAL_VERSION = Integer.MAX_VALUE;
+
+ /**
+ * The table of this mapping.
+ */
+ private IDBTable table;
+
+ /**
+ * The type mapping for the value field.
+ */
+ private ITypeMapping typeMapping;
+
+ // --------- SQL strings - see initSqlStrings() -----------------
+ private String sqlSelectChunksPrefix;
+
+ private String sqlOrderByIndex;
+
+ private String sqlInsertEntry;
+
+ private String sqlDeleteEntry;
+
+ private String sqlRemoveEntry;
+
+ private String sqlUpdateIndex;
+
+ private String sqlGetValue;
+
+ private String sqlGetListLastIndex;
+
+ private String sqlClearList;
+
+ private String sqlDeleteList;
+
+ public AuditListTableMappingWithRanges(IMappingStrategy mappingStrategy, EClass eClass, EStructuralFeature feature)
+ {
+ super(mappingStrategy, eClass, feature);
+ initTable();
+ initSqlStrings();
+ }
+
+ private void initTable()
+ {
+ String tableName = getMappingStrategy().getTableName(getContainingClass(), getFeature());
+ table = getMappingStrategy().getStore().getDBSchema().addTable(tableName);
+
+ IDBField[] dbFields = new IDBField[4];
+
+ dbFields[0] = table.addField(CDODBSchema.LIST_REVISION_ID, DBType.BIGINT);
+ dbFields[1] = table.addField(CDODBSchema.LIST_REVISION_VERSION_ADDED, DBType.INTEGER);
+ dbFields[2] = table.addField(CDODBSchema.LIST_REVISION_VERSION_REMOVED, DBType.INTEGER);
+ dbFields[3] = table.addField(CDODBSchema.LIST_IDX, DBType.INTEGER);
+
+ // add field for value
+ typeMapping = getMappingStrategy().createValueMapping(getFeature());
+ typeMapping.createDBField(table, CDODBSchema.LIST_VALUE);
+
+ // TODO think about indexes
+ // add table indexes
+ table.addIndex(Type.UNIQUE, dbFields);
+ }
+
+ public Collection<IDBTable> getDBTables()
+ {
+ return Arrays.asList(table);
+ }
+
+ private void initSqlStrings()
+ {
+ String tableName = getTable().getName();
+
+ // ---------------- read chunks ----------------------------
+ StringBuilder builder = new StringBuilder();
+ builder.append("SELECT "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_VALUE);
+ builder.append(" FROM "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_ADDED);
+ builder.append(" <= ? AND ( "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" IS NULL OR "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" > ? )"); //$NON-NLS-1$
+ sqlSelectChunksPrefix = builder.toString();
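+    // resulting statement (sketch): SELECT LIST_VALUE FROM <table>
+    //   WHERE LIST_REVISION_ID = ? AND LIST_REVISION_VERSION_ADDED <= ?
+    //   AND (LIST_REVISION_VERSION_REMOVED IS NULL OR LIST_REVISION_VERSION_REMOVED > ?)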
+
+ sqlOrderByIndex = " ORDER BY " + CDODBSchema.LIST_IDX; //$NON-NLS-1$
+
+ // ----------------- count list size --------------------------
+ builder = new StringBuilder("SELECT count(1) FROM "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_ADDED);
+ builder.append(" <= ? AND ( "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" IS NULL OR "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" > ? )"); //$NON-NLS-1$
+ sqlGetListLastIndex = builder.toString();
+
+ // ----------------- insert entry -----------------
+ builder = new StringBuilder("INSERT INTO "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append("(");
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append(",");
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_ADDED);
+ builder.append(",");
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(",");
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append(",");
+ builder.append(CDODBSchema.LIST_VALUE);
+ builder.append(") VALUES (?, ?, NULL, ?, ?)"); //$NON-NLS-1$
+ sqlInsertEntry = builder.toString();
+
+ // ----------------- remove current entry -----------------
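+    // closes the version range of the live entry (LIST_REVISION_VERSION_REMOVED IS NULL) at the given index by
+    // setting LIST_REVISION_VERSION_REMOVED to the new version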
+ builder = new StringBuilder("UPDATE "); //$NON-NLS-1$
+ builder.append(getTable().getName());
+ builder.append(" SET "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" = ? "); //$NON-NLS-1$
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlRemoveEntry = builder.toString();
+
+ // ----------------- delete temporary entry -----------------
+ builder = new StringBuilder("DELETE FROM "); //$NON-NLS-1$
+ builder.append(getTable().getName());
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_ADDED);
+ builder.append(" = ?"); //$NON-NLS-1$
+ sqlDeleteEntry = builder.toString();
+
+ // ----------------- update index -----------------
+ builder = new StringBuilder("UPDATE "); //$NON-NLS-1$
+ builder.append(getTable().getName());
+ builder.append(" SET "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append(" = ? WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_ADDED);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append(" = ?"); //$NON-NLS-1$
+ sqlUpdateIndex = builder.toString();
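+    // only matches rows that were added in the given version (LIST_REVISION_VERSION_ADDED = ?); if no row matches,
+    // moveOneUp/moveOneDown fall back to removeEntry/addEntry for that index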
+
+ // ----------------- get current value -----------------
+ builder = new StringBuilder("SELECT "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_VALUE);
+ builder.append(" FROM "); //$NON-NLS-1$
+ builder.append(getTable().getName());
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlGetValue = builder.toString();
+
+ // ----------- clear list items -------------------------
+ builder = new StringBuilder("UPDATE "); //$NON-NLS-1$
+ builder.append(getTable().getName());
+ builder.append(" SET "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" = ? "); //$NON-NLS-1$
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlClearList = builder.toString();
+
+ // ----------- delete temporary list items -------------------------
+ builder = new StringBuilder("DELETE FROM "); //$NON-NLS-1$
+ builder.append(getTable().getName());
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_ADDED);
+ builder.append(" = ? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlDeleteList = builder.toString();
+ }
+
+ protected final IDBTable getTable()
+ {
+ return table;
+ }
+
+ protected final ITypeMapping getTypeMapping()
+ {
+ return typeMapping;
+ }
+
+ public void readValues(IDBStoreAccessor accessor, InternalCDORevision revision, int listChunk)
+ {
+ MoveableList<Object> list = revision.getList(getFeature());
+ int listSize = -1;
+
+ if (listChunk != CDORevision.UNCHUNKED)
+ {
+ listSize = getListLastIndex(accessor, revision.getID(), revision.getVersion());
+ if (listSize == -1)
+ {
+ // list is empty - take shortcut
+ return;
+ }
+
+ // subtract amount of items we are going to read now
+ listSize -= listChunk;
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading list values for feature {0}.{1} of {2}v{3}", getContainingClass().getName(), //$NON-NLS-1$
+ getFeature().getName(), revision.getID(), revision.getVersion());
+ }
+
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+ ResultSet resultSet = null;
+
+ try
+ {
+ String sql = sqlSelectChunksPrefix + sqlOrderByIndex;
+ pstmt = statementCache.getPreparedStatement(sql, ReuseProbability.HIGH);
+ pstmt.setLong(1, CDOIDUtil.getLong(revision.getID()));
+ pstmt.setInt(2, revision.getVersion());
+ pstmt.setInt(3, revision.getVersion());
+
+ if (listChunk != CDORevision.UNCHUNKED)
+ {
+ pstmt.setMaxRows(listChunk); // optimization - don't read unneeded rows.
+ }
+
+ resultSet = pstmt.executeQuery();
+ while ((listChunk == CDORevision.UNCHUNKED || --listChunk >= 0) && resultSet.next())
+ {
+ Object value = typeMapping.readValue(resultSet);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Read value for index {0} from result set: {1}", list.size(), value); //$NON-NLS-1$
+ }
+
+ list.add(value);
+ }
+
+ while (listSize-- >= 0)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Adding UNINITIALIZED for index {0} ", list.size()); //$NON-NLS-1$
+ }
+
+ list.add(InternalCDOList.UNINITIALIZED);
+ }
+ }
+ catch (SQLException ex)
+ {
+ throw new DBException(ex);
+ }
+ finally
+ {
+ DBUtil.close(resultSet);
+ statementCache.releasePreparedStatement(pstmt);
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading {4} list values done for feature {0}.{1} of {2}v{3}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), revision.getID(), revision.getVersion(), list.size());
+ }
+ }
+
+ /**
+   * Return the last (maximum) list index (equal to size - 1).
+ *
+ * @param accessor
+ * the accessor to use
+ * @param id
+ * the CDOID of the revision to which the getFeature() list belongs
+ * @param version
+   *          the version of the revision to which the getFeature() list belongs
+ * @return the last index or <code>-1</code> if the list is empty.
+ */
+ private int getListLastIndex(IDBStoreAccessor accessor, CDOID id, int version)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+ ResultSet resultSet = null;
+
+ try
+ {
+ pstmt = statementCache.getPreparedStatement(sqlGetListLastIndex, ReuseProbability.HIGH);
+ pstmt.setLong(1, CDOIDUtil.getLong(id));
+ pstmt.setInt(2, version);
+ pstmt.setInt(3, version);
+
+ resultSet = pstmt.executeQuery();
+ if (!resultSet.next())
+ {
+ throw new DBException("count expects exactly one result.");
+ }
+
+ int result = resultSet.getInt(1) - 1;
+ if (TRACER.isEnabled())
+ {
+ TRACER.trace("Read list last index = " + result); //$NON-NLS-1$
+ }
+
+ return result;
+ }
+ catch (SQLException ex)
+ {
+ throw new DBException(ex);
+ }
+ finally
+ {
+ DBUtil.close(resultSet);
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+
+ public final void readChunks(IDBStoreChunkReader chunkReader, List<Chunk> chunks, String where)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading list chunk values for feature() {0}.{1} of {2}v{3}", getContainingClass().getName(), //$NON-NLS-1$
+ getFeature().getName(), chunkReader.getRevision().getID(), chunkReader.getRevision().getVersion());
+ }
+
+ IPreparedStatementCache statementCache = chunkReader.getAccessor().getStatementCache();
+ PreparedStatement pstmt = null;
+ ResultSet resultSet = null;
+
+ try
+ {
+ StringBuilder builder = new StringBuilder(sqlSelectChunksPrefix);
+ if (where != null)
+ {
+ builder.append(" AND "); //$NON-NLS-1$
+ builder.append(where);
+ }
+
+ builder.append(sqlOrderByIndex);
+
+ String sql = builder.toString();
+ pstmt = statementCache.getPreparedStatement(sql, ReuseProbability.LOW);
+ pstmt.setLong(1, CDOIDUtil.getLong(chunkReader.getRevision().getID()));
+ pstmt.setInt(2, chunkReader.getRevision().getVersion());
+ pstmt.setInt(3, chunkReader.getRevision().getVersion());
+
+ resultSet = pstmt.executeQuery();
+
+ Chunk chunk = null;
+ int chunkSize = 0;
+ int chunkIndex = 0;
+ int indexInChunk = 0;
+
+ while (resultSet.next())
+ {
+ Object value = typeMapping.readValue(resultSet);
+
+ if (chunk == null)
+ {
+ chunk = chunks.get(chunkIndex++);
+ chunkSize = chunk.size();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Current chunk no. {0} is [start = {1}, size = {2}]", chunkIndex - 1, chunk.getStartIndex(), //$NON-NLS-1$
+ chunkSize);
+ }
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Read value for chunk index {0} from result set: {1}", indexInChunk, value); //$NON-NLS-1$
+ }
+
+ chunk.add(indexInChunk++, value);
+ if (indexInChunk == chunkSize)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Chunk finished."); //$NON-NLS-1$
+ }
+
+ chunk = null;
+ indexInChunk = 0;
+ }
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading list chunk values done for feature() {0}.{1} of {2}v{3}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), chunkReader.getRevision().getID(), chunkReader
+ .getRevision().getVersion());
+ }
+ }
+ catch (SQLException ex)
+ {
+ throw new DBException(ex);
+ }
+ finally
+ {
+ DBUtil.close(resultSet);
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+
+ public void writeValues(IDBStoreAccessor accessor, InternalCDORevision revision)
+ {
+ CDOList values = revision.getList(getFeature());
+
+ int idx = 0;
+ for (Object element : values)
+ {
+ writeValue(accessor, revision, idx++, element);
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Writing done");
+ }
+ }
+
+ protected final void writeValue(IDBStoreAccessor accessor, CDORevision revision, int index, Object value)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER
+ .format(
+ "Writing value for feature {0}.{1} index {2} of {3}v{4} : {5}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, revision.getID(), revision.getVersion(),
+ value);
+ }
+
+ addEntry(accessor, revision.getID(), revision.getVersion(), index, value);
+ }
+
+ /**
+ * Clear a list of a given revision.
+ *
+ * @param accessor
+ * the accessor to use
+ * @param id
+ * the id of the revision from which to remove all items
+ */
+ public void clearList(IDBStoreAccessor accessor, CDOID id, int oldVersion, int newVersion)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmtDeleteTemp = null;
+ PreparedStatement pstmtClear = null;
+
+ try
+ {
+ // delete temporary entries
+ pstmtDeleteTemp = statementCache.getPreparedStatement(sqlDeleteList, ReuseProbability.HIGH);
+ pstmtDeleteTemp.setLong(1, CDOIDUtil.getLong(id));
+ pstmtDeleteTemp.setInt(2, newVersion);
+
+ int result = CDODBUtil.sqlUpdate(pstmtDeleteTemp, false);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("DeleteList result: {0}", result); //$NON-NLS-1$
+ }
+
+ // clear rest of the list
+ pstmtClear = statementCache.getPreparedStatement(sqlClearList, ReuseProbability.HIGH);
+ pstmtClear.setInt(1, newVersion);
+ pstmtClear.setLong(2, CDOIDUtil.getLong(id));
+
+ result = CDODBUtil.sqlUpdate(pstmtClear, false);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("ClearList result: {0}", result); //$NON-NLS-1$
+ }
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmtDeleteTemp);
+ statementCache.releasePreparedStatement(pstmtClear);
+ }
+ }
+
+ public void objectRevised(IDBStoreAccessor accessor, CDOID id, long revised)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("objectRevised {0} {1}", id, revised);
+ }
+
+ // get revision from cache to find out version number
+ CDORevision revision = getMappingStrategy().getStore().getRepository().getRevisionManager().getRevision(id, 0,
+ CDORevision.DEPTH_NONE);
+
+    // set cdo_version_removed for all list items (so we have no NULL values)
+ clearList(accessor, id, revision.getVersion(), FINAL_VERSION);
+ }
+
+ public void processDelta(final IDBStoreAccessor accessor, final CDOID id, int oldVersion, final int newVersion,
+ long created, CDOListFeatureDelta delta)
+ {
+ InternalCDORevision originalRevision = (InternalCDORevision)accessor.getStore().getRepository()
+ .getRevisionManager().getRevision(id, 0, CDORevision.DEPTH_NONE);
+ int oldListSize = originalRevision.getList(getFeature()).size();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("ListTableMapping.processDelta for revision {0} - previous list size: {1}", originalRevision,
+ oldListSize);
+ }
+
+    // let the visitor apply the changes directly to the range table
+ ListDeltaVisitor visitor = new ListDeltaVisitor(accessor, originalRevision, oldVersion, newVersion);
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("processing deltas ...");
+ }
+
+ for (CDOFeatureDelta listDelta : delta.getListChanges())
+ {
+ listDelta.accept(visitor);
+ }
+ }
+
+ /**
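+   * Translates the individual list feature deltas into addEntry/removeEntry/clearList/moveOneUp/moveOneDown
+   * operations on the range table.
+   *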
+ * @author Stefan Winkler
+ */
+ private class ListDeltaVisitor implements CDOFeatureDeltaVisitor
+ {
+ private IDBStoreAccessor accessor;
+
+ private CDOID id;
+
+ private int oldVersion;
+
+ private int newVersion;
+
+ private int lastIndex;
+
+ public ListDeltaVisitor(IDBStoreAccessor accessor, InternalCDORevision originalRevision, int oldVersion,
+ int newVersion)
+ {
+ this.accessor = accessor;
+ id = originalRevision.getID();
+ this.oldVersion = oldVersion;
+ this.newVersion = newVersion;
+ lastIndex = originalRevision.getList(getFeature()).size() - 1;
+ }
+
+ public void visit(CDOMoveFeatureDelta delta)
+ {
+ int fromIdx = delta.getOldPosition();
+ int toIdx = delta.getNewPosition();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Moving: {0} to {1}", fromIdx, toIdx); //$NON-NLS-1$
+ }
+
+ Object value = getValue(accessor, id, fromIdx);
+
+ // remove the item
+ removeEntry(accessor, id, oldVersion, newVersion, fromIdx);
+
+ // adjust indexes and shift either up or down
+ if (fromIdx < toIdx)
+ {
+ moveOneUp(accessor, id, oldVersion, newVersion, fromIdx + 1, toIdx);
+ }
+ else
+ { // fromIdx > toIdx here
+ moveOneDown(accessor, id, oldVersion, newVersion, toIdx, fromIdx - 1);
+ }
+
+ // create the item
+ addEntry(accessor, id, newVersion, toIdx, value);
+ }
+
+ public void visit(CDOAddFeatureDelta delta)
+ {
+ int startIndex = delta.getIndex();
+ int endIndex = lastIndex;
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Adding at: {0}", startIndex); //$NON-NLS-1$
+ }
+
+ if (startIndex <= endIndex)
+ {
+ // make room for the new item
+ moveOneDown(accessor, id, oldVersion, newVersion, startIndex, endIndex);
+ }
+
+ // create the item
+ addEntry(accessor, id, newVersion, startIndex, delta.getValue());
+
+ ++lastIndex;
+ }
+
+ public void visit(CDORemoveFeatureDelta delta)
+ {
+ int startIndex = delta.getIndex();
+ int endIndex = lastIndex;
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Removing at: {0}", startIndex); //$NON-NLS-1$
+ }
+
+ // remove the item
+ removeEntry(accessor, id, oldVersion, newVersion, startIndex);
+
+      // close the gap left by the removed item
+ moveOneUp(accessor, id, oldVersion, newVersion, startIndex + 1, endIndex);
+
+ --lastIndex;
+ }
+
+ public void visit(CDOSetFeatureDelta delta)
+ {
+ int index = delta.getIndex();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Setting at: {0}", index); //$NON-NLS-1$
+ }
+
+ // remove the item
+ removeEntry(accessor, id, oldVersion, newVersion, index);
+
+ // create the item
+ addEntry(accessor, id, newVersion, index, delta.getValue());
+ }
+
+ public void visit(CDOUnsetFeatureDelta delta)
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ public void visit(CDOListFeatureDelta delta)
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ public void visit(CDOClearFeatureDelta delta)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Clearing"); //$NON-NLS-1$
+ }
+
+ clearList(accessor, id, oldVersion, newVersion);
+ lastIndex = -1;
+ }
+
+ public void visit(CDOContainerFeatureDelta delta)
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ private void moveOneUp(IDBStoreAccessor accessor, CDOID id, int oldVersion, int newVersion, int startIndex,
+ int endIndex)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+
+ try
+ {
+ pstmt = statementCache.getPreparedStatement(sqlUpdateIndex, ReuseProbability.HIGH);
+
+ for (int index = startIndex; index <= endIndex; ++index)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp moving: {0} -> {1}", index, index - 1); //$NON-NLS-1$
+ }
+
+ int stmtIndex = 1;
+ pstmt.setInt(stmtIndex++, index - 1);
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, newVersion);
+ pstmt.setInt(stmtIndex++, index);
+
+ int result = CDODBUtil.sqlUpdate(pstmt, false);
+ switch (result)
+ {
+ case 0:
+ Object value = getValue(accessor, id, index);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp remove: {0}", index); //$NON-NLS-1$
+ }
+
+ removeEntry(accessor, id, oldVersion, newVersion, index);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp add: {0}", index - 1); //$NON-NLS-1$
+ }
+
+ addEntry(accessor, id, newVersion, index - 1, value);
+ break;
+
+ case 1:
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp updated: {0} -> {1}", index, index - 1); //$NON-NLS-1$
+ }
+
+ break;
+
+ default:
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp Too many results: {0} -> {1}: {2}", index, index + 1, result); //$NON-NLS-1$
+ }
+
+ throw new DBException("Too many results"); //$NON-NLS-1$
+ }
+ }
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+
+ private void moveOneDown(IDBStoreAccessor accessor, CDOID id, int oldVersion, int newVersion, int startIndex,
+ int endIndex)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+
+ try
+ {
+ pstmt = statementCache.getPreparedStatement(sqlUpdateIndex, ReuseProbability.HIGH);
+
+ for (int index = endIndex; index >= startIndex; --index)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown moving: {0} -> {1}", index, index + 1); //$NON-NLS-1$
+ }
+
+ int stmtIndex = 1;
+ pstmt.setInt(stmtIndex++, index + 1);
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, newVersion);
+ pstmt.setInt(stmtIndex++, index);
+
+ int result = CDODBUtil.sqlUpdate(pstmt, false);
+ switch (result)
+ {
+ case 0:
+ Object value = getValue(accessor, id, index);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown remove: {0}", index); //$NON-NLS-1$
+ }
+
+ removeEntry(accessor, id, oldVersion, newVersion, index);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown add: {0}", index + 1); //$NON-NLS-1$
+ }
+
+ addEntry(accessor, id, newVersion, index + 1, value);
+ break;
+
+ case 1:
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown updated: {0} -> {1}", index, index + 1); //$NON-NLS-1$
+ }
+
+ break;
+
+ default:
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown Too many results: {0} -> {1}: {2}", index, index + 1, result); //$NON-NLS-1$
+ }
+
+ throw new DBException("Too many results"); //$NON-NLS-1$
+ }
+ }
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+ }
+
+ private void addEntry(IDBStoreAccessor accessor, CDOID id, int version, int index, Object value)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Adding value for feature() {0}.{1} index {2} of {3}v{4} : {5}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, version, value);
+ }
+
+ try
+ {
+ pstmt = statementCache.getPreparedStatement(sqlInsertEntry, ReuseProbability.HIGH);
+
+ int stmtIndex = 1;
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, version);
+ pstmt.setInt(stmtIndex++, index);
+ typeMapping.setValue(pstmt, stmtIndex++, value);
+
+ CDODBUtil.sqlUpdate(pstmt, true);
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ catch (IllegalStateException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+
+ private void removeEntry(IDBStoreAccessor accessor, CDOID id, int oldVersion, int newVersion, int index)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Removing value for feature() {0}.{1} index {2} of {3}v{4}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, newVersion);
+ }
+
+ try
+ {
+ // try to delete a temporary entry first
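+      // an entry that was added in this same new version has no history to preserve and can be deleted physically;
+      // otherwise the fallback below only closes its version range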
+ pstmt = statementCache.getPreparedStatement(sqlDeleteEntry, ReuseProbability.HIGH);
+
+ int stmtIndex = 1;
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, index);
+ pstmt.setInt(stmtIndex++, newVersion);
+
+ int result = CDODBUtil.sqlUpdate(pstmt, false);
+ if (result == 1)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("removeEntry deleted: {0}", index); //$NON-NLS-1$
+ }
+ }
+ else if (result > 1)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("removeEntry Too many results: {0}: {1}", index, result); //$NON-NLS-1$
+ }
+
+ throw new DBException("Too many results"); //$NON-NLS-1$
+ }
+ else
+ {
+ // no temporary entry found, so mark the entry as removed
+ statementCache.releasePreparedStatement(pstmt);
+ pstmt = statementCache.getPreparedStatement(sqlRemoveEntry, ReuseProbability.HIGH);
+
+ stmtIndex = 1;
+ pstmt.setInt(stmtIndex++, newVersion);
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, index);
+
+ CDODBUtil.sqlUpdate(pstmt, true);
+ }
+ }
+ catch (SQLException e)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Removing value for feature() {0}.{1} index {2} of {3}v{4} FAILED {5}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, newVersion, e.getMessage());
+ }
+
+ throw new DBException(e);
+ }
+ catch (IllegalStateException e)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Removing value for feature() {0}.{1} index {2} of {3}v{4} FAILED {5}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, newVersion, e.getMessage());
+ }
+
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+
+ private Object getValue(IDBStoreAccessor accessor, CDOID id, int index)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+ Object result = null;
+
+ try
+ {
+ pstmt = statementCache.getPreparedStatement(sqlGetValue, ReuseProbability.HIGH);
+
+ int stmtIndex = 1;
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, index);
+
+ ResultSet resultSet = pstmt.executeQuery();
+ if (!resultSet.next())
+ {
+ throw new DBException("getValue expects exactly one result.");
+ }
+
+ result = typeMapping.readValue(resultSet);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Read value (index {0}) from result set: {1}", index, result); //$NON-NLS-1$
+ }
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+
+ return result;
+ }
+}
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/BasicAbstractListTableMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/BasicAbstractListTableMapping.java
new file mode 100644
index 0000000000..10ecb36a61
--- /dev/null
+++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/BasicAbstractListTableMapping.java
@@ -0,0 +1,52 @@
+/**
+ * Copyright (c) 2004 - 2009 Eike Stepper (Berlin, Germany) and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Stefan Winkler - initial API and implementation
+ */
+package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal;
+
+import org.eclipse.emf.cdo.server.db.mapping.IListMapping;
+import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy;
+
+import org.eclipse.emf.ecore.EClass;
+import org.eclipse.emf.ecore.EStructuralFeature;
+
+/**
+ * @author Stefan Winkler
+ */
+public abstract class BasicAbstractListTableMapping implements IListMapping
+{
+ private IMappingStrategy mappingStrategy;
+
+ private EClass containingClass;
+
+ private EStructuralFeature feature;
+
+ public BasicAbstractListTableMapping(IMappingStrategy mappingStrategy, EClass containingClass,
+ EStructuralFeature feature)
+ {
+ this.mappingStrategy = mappingStrategy;
+ this.containingClass = containingClass;
+ this.feature = feature;
+ }
+
+ public final IMappingStrategy getMappingStrategy()
+ {
+ return mappingStrategy;
+ }
+
+ public final EClass getContainingClass()
+ {
+ return containingClass;
+ }
+
+ public final EStructuralFeature getFeature()
+ {
+ return feature;
+ }
+}
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditClassMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditClassMapping.java
index 2a01364ab0..9c43d30a56 100644
--- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditClassMapping.java
+++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditClassMapping.java
@@ -8,26 +8,41 @@
* Contributors:
* Eike Stepper - initial API and implementation
* Stefan Winkler - major refactoring
- * Stefan Winkler - 249610: [DB] Support external references (Implementation)
+ * Stefan Winkler - Bug 249610: [DB] Support external references (Implementation)
+ * Lothar Werzinger - Bug 296440: [DB] Change RDB schema to improve scalability of to-many references in audit mode
*/
package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal;
import org.eclipse.emf.cdo.common.id.CDOID;
import org.eclipse.emf.cdo.common.id.CDOIDUtil;
import org.eclipse.emf.cdo.common.revision.CDORevision;
+import org.eclipse.emf.cdo.common.revision.delta.CDOAddFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOClearFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOContainerFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOFeatureDeltaVisitor;
+import org.eclipse.emf.cdo.common.revision.delta.CDOListFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOMoveFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDORemoveFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOSetFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOUnsetFeatureDelta;
import org.eclipse.emf.cdo.eresource.EresourcePackage;
import org.eclipse.emf.cdo.server.db.CDODBUtil;
import org.eclipse.emf.cdo.server.db.IDBStoreAccessor;
import org.eclipse.emf.cdo.server.db.IPreparedStatementCache.ReuseProbability;
import org.eclipse.emf.cdo.server.db.mapping.IClassMapping;
import org.eclipse.emf.cdo.server.db.mapping.IClassMappingAuditSupport;
+import org.eclipse.emf.cdo.server.db.mapping.IClassMappingDeltaSupport;
+import org.eclipse.emf.cdo.server.db.mapping.IListMappingDeltaSupport;
import org.eclipse.emf.cdo.server.db.mapping.ITypeMapping;
import org.eclipse.emf.cdo.server.internal.db.CDODBSchema;
import org.eclipse.emf.cdo.server.internal.db.bundle.OM;
import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevision;
+import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevisionDelta;
import org.eclipse.net4j.db.DBException;
import org.eclipse.net4j.util.ImplementationError;
+import org.eclipse.net4j.util.om.monitor.OMMonitor;
+import org.eclipse.net4j.util.om.monitor.OMMonitor.Async;
import org.eclipse.net4j.util.om.trace.ContextTracer;
import org.eclipse.emf.ecore.EClass;
@@ -44,7 +59,7 @@ import java.util.Map;
* @since 2.0
*/
public class HorizontalAuditClassMapping extends AbstractHorizontalClassMapping implements IClassMapping,
- IClassMappingAuditSupport
+ IClassMappingAuditSupport, IClassMappingDeltaSupport
{
private static final ContextTracer TRACER = new ContextTracer(OM.DEBUG, HorizontalAuditClassMapping.class);
@@ -60,6 +75,15 @@ public class HorizontalAuditClassMapping extends AbstractHorizontalClassMapping
private String sqlReviseAttributes;
+ private ThreadLocal<FeatureDeltaWriter> deltaWriter = new ThreadLocal<FeatureDeltaWriter>()
+ {
+ @Override
+ protected FeatureDeltaWriter initialValue()
+ {
+ return new FeatureDeltaWriter();
+ };
+ };
+
public HorizontalAuditClassMapping(AbstractHorizontalMappingStrategy mappingStrategy, EClass eClass)
{
super(mappingStrategy, eClass);
@@ -475,4 +499,110 @@ public class HorizontalAuditClassMapping extends AbstractHorizontalClassMapping
accessor.getStatementCache().releasePreparedStatement(stmt);
}
}
+
+ public void writeRevisionDelta(IDBStoreAccessor accessor, InternalCDORevisionDelta delta, long created,
+ OMMonitor monitor)
+ {
+ monitor.begin();
+ Async async = monitor.forkAsync();
+
+ try
+ {
+ FeatureDeltaWriter writer = deltaWriter.get();
+ writer.process(accessor, delta, created);
+ }
+ finally
+ {
+ async.stop();
+ monitor.done();
+ }
+ }
+
+ /**
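+   * Applies an InternalCDORevisionDelta: copies the original revision, applies single-valued, unset and container
+   * deltas to the copy, delegates list deltas to the corresponding IListMappingDeltaSupport, revises the old
+   * revision and writes the new values.
+   *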
+ * @author Stefan Winkler
+ */
+ private class FeatureDeltaWriter implements CDOFeatureDeltaVisitor
+ {
+ private IDBStoreAccessor accessor;
+
+ private long created;
+
+ private CDOID id;
+
+ private int oldVersion;
+
+ private int newVersion;
+
+ private InternalCDORevision newRevision;
+
+ public void process(IDBStoreAccessor accessor, InternalCDORevisionDelta delta, long created)
+ {
+ this.accessor = accessor;
+ this.created = created;
+ id = delta.getID();
+ oldVersion = delta.getOriginVersion();
+ newVersion = delta.getDirtyVersion();
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("FeatureDeltaWriter: old version: {0}, new version: {1}", oldVersion, newVersion); //$NON-NLS-1$
+ }
+
+ InternalCDORevision originalRevision = (InternalCDORevision)accessor.getStore().getRepository()
+ .getRevisionManager().getRevision(id, 0, CDORevision.DEPTH_NONE);
+
+ newRevision = (InternalCDORevision)originalRevision.copy();
+
+ newRevision.setVersion(newVersion);
+ newRevision.setCreated(created);
+
+ // process revision delta tree
+ delta.accept(this);
+
+ long revised = newRevision.getCreated() - 1;
+ reviseObject(accessor, id, revised);
+
+ writeValues(accessor, newRevision);
+ }
+
+ public void visit(CDOMoveFeatureDelta delta)
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ public void visit(CDOAddFeatureDelta delta)
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ public void visit(CDORemoveFeatureDelta delta)
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ public void visit(CDOSetFeatureDelta delta)
+ {
+ delta.apply(newRevision);
+ }
+
+ public void visit(CDOUnsetFeatureDelta delta)
+ {
+ delta.apply(newRevision);
+ }
+
+ public void visit(CDOListFeatureDelta delta)
+ {
+ IListMappingDeltaSupport listMapping = (IListMappingDeltaSupport)getListMapping(delta.getFeature());
+ listMapping.processDelta(accessor, id, oldVersion, newVersion, created, delta);
+ }
+
+ public void visit(CDOClearFeatureDelta delta)
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ public void visit(CDOContainerFeatureDelta delta)
+ {
+ delta.apply(newRevision);
+ }
+ }
}
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditMappingStrategyWithRanges.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditMappingStrategyWithRanges.java
new file mode 100644
index 0000000000..7a846c6416
--- /dev/null
+++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditMappingStrategyWithRanges.java
@@ -0,0 +1,59 @@
+/**
+ * Copyright (c) 2004 - 2009 Eike Stepper (Berlin, Germany) and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Eike Stepper - initial API and implementation
+ * Stefan Winkler - major refactoring
+ */
+package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal;
+
+import org.eclipse.emf.cdo.server.db.mapping.IClassMapping;
+import org.eclipse.emf.cdo.server.db.mapping.IListMapping;
+
+import org.eclipse.emf.ecore.EClass;
+import org.eclipse.emf.ecore.EStructuralFeature;
+
+/**
+ * @author Eike Stepper
+ * @since 2.0
+ */
+public class HorizontalAuditMappingStrategyWithRanges extends AbstractHorizontalMappingStrategy
+{
+ public HorizontalAuditMappingStrategyWithRanges()
+ {
+ }
+
+ @Override
+ public boolean hasDeltaSupport()
+ {
+ return true;
+ }
+
+ @Override
+ public boolean hasAuditSupport()
+ {
+ return true;
+ }
+
+ @Override
+ public IClassMapping doCreateClassMapping(EClass eClass)
+ {
+ return new HorizontalAuditClassMapping(this, eClass);
+ }
+
+ @Override
+ public IListMapping doCreateListMapping(EClass containingClass, EStructuralFeature feature)
+ {
+ return new AuditListTableMappingWithRanges(this, containingClass, feature);
+ }
+
+ @Override
+ public IListMapping doCreateFeatureMapMapping(EClass containingClass, EStructuralFeature feature)
+ {
+ return new AuditFeatureMapTableMappingWithRanges(this, containingClass, feature);
+ }
+}
diff --git a/plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2.java b/plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2.java
index 8b17c241fc..0ad495418e 100644
--- a/plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2.java
+++ b/plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2.java
@@ -7,10 +7,10 @@
*
* Contributors:
* Eike Stepper - initial API and implementation
+ * Stefan Winkler - introduced variable mapping strategies
*/
package org.eclipse.emf.cdo.tests.db;
-import org.eclipse.emf.cdo.server.db.CDODBUtil;
import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy;
import org.eclipse.net4j.db.DBUtil;
@@ -45,7 +45,8 @@ public class AllTestsDBH2 extends DBConfigs
@Override
protected void initConfigSuites(TestSuite parent)
{
- addScenario(parent, COMBINED, AllTestsDBH2.H2.ReusableFolder.INSTANCE, JVM, NATIVE);
+ addScenario(parent, COMBINED, AllTestsDBH2.H2.ReusableFolder.AUDIT_INSTANCE, JVM, NATIVE);
+ addScenario(parent, COMBINED, AllTestsDBH2.H2.ReusableFolder.RANGE_INSTANCE, JVM, NATIVE);
}
/**
@@ -55,19 +56,32 @@ public class AllTestsDBH2 extends DBConfigs
{
private static final long serialVersionUID = 1L;
- public static final AllTestsDBH2.H2 INSTANCE = new H2("DBStore: H2");
+ public static final AllTestsDBH2.H2 INSTANCE = new H2("DBStore: H2 (audit)",
+ "org.eclipse.emf.cdo.server.internal.db.mapping.horizontal.HorizontalAuditMappingStrategy");
protected transient File dbFolder;
- public H2(String name)
+ private String mappingStrategy;
+
+ public H2(String name, String mappingStrategy)
{
super(name);
+ this.mappingStrategy = mappingStrategy;
}
+ @SuppressWarnings("unchecked")
@Override
protected IMappingStrategy createMappingStrategy()
{
- return CDODBUtil.createHorizontalMappingStrategy(true);
+ try
+ {
+ Class<IMappingStrategy> clazz = (Class<IMappingStrategy>)Class.forName(mappingStrategy);
+ return clazz.newInstance();
+ }
+ catch (Exception ex)
+ {
+ throw WrappedException.wrap(ex);
+ }
}
@Override
@@ -107,7 +121,12 @@ public class AllTestsDBH2 extends DBConfigs
{
private static final long serialVersionUID = 1L;
- public static final ReusableFolder INSTANCE = new ReusableFolder("DBStore: H2 (Reusable Folder)");
+ public static final ReusableFolder AUDIT_INSTANCE = new ReusableFolder("DBStore: H2 (Reusable Folder, audit), ",
+ "org.eclipse.emf.cdo.server.internal.db.mapping.horizontal.HorizontalAuditMappingStrategy");
+
+ public static final ReusableFolder RANGE_INSTANCE = new ReusableFolder(
+ "DBStore: H2 (Reusable Folder, audit, range-based mapping strategy)",
+ "org.eclipse.emf.cdo.server.internal.db.mapping.horizontal.HorizontalAuditMappingStrategyWithRanges");
private static File reusableFolder;
@@ -115,9 +134,9 @@ public class AllTestsDBH2 extends DBConfigs
private transient ArrayList<String> repoNames = new ArrayList<String>();
- public ReusableFolder(String name)
+ public ReusableFolder(String name, String mappingStrategy)
{
- super(name);
+ super(name, mappingStrategy);
}
@Override
@@ -138,6 +157,7 @@ public class AllTestsDBH2 extends DBConfigs
Connection conn = null;
Statement stmt = null;
+
try
{
conn = defaultDataSource.getConnection();
@@ -157,7 +177,6 @@ public class AllTestsDBH2 extends DBConfigs
JdbcDataSource dataSource = new JdbcDataSource();
dataSource.setURL("jdbc:h2:" + dbFolder.getAbsolutePath() + "/h2test;SCHEMA=" + repoName);
-
return dataSource;
}

Back to the top