diff options
25 files changed, 4374 insertions, 545 deletions
diff --git a/plugins/org.eclipse.emf.cdo.server.db/plugin.xml b/plugins/org.eclipse.emf.cdo.server.db/plugin.xml index 8cd042c06a..fd37688bba 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/plugin.xml +++ b/plugins/org.eclipse.emf.cdo.server.db/plugin.xml @@ -50,6 +50,10 @@ class="org.eclipse.emf.cdo.server.internal.db.mapping.horizontal.HorizontalAuditMappingStrategyWithRanges" type="horizontalAuditWithRanges"> </mappingStrategy> + <mappingStrategy + class="org.eclipse.emf.cdo.server.internal.db.mapping.horizontal.HorizontalBranchingMappingStrategyWithRanges" + type="horizontalBranchingWithRanges"> + </mappingStrategy> </extension> </plugin> diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/db/mapping/IListMappingDeltaSupport.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/db/mapping/IListMappingDeltaSupport.java index 60f9e8e557..137556ff76 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/db/mapping/IListMappingDeltaSupport.java +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/db/mapping/IListMappingDeltaSupport.java @@ -8,6 +8,7 @@ * Contributors: * Eike Stepper - initial API and implementation * Stefan Winkler - 271444: [DB] Multiple refactorings bug 271444 + * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy */ package org.eclipse.emf.cdo.server.db.mapping; @@ -39,7 +40,8 @@ public interface IListMappingDeltaSupport * the creation date for the new revision * @param delta * the {@link CDOListFeatureDelta} which contains the list deltas. 
+ * @since 4.0 */ - public void processDelta(IDBStoreAccessor accessor, CDOID id, int oldVersion, int newVersion, long created, - CDOListFeatureDelta delta); + public void processDelta(IDBStoreAccessor accessor, CDOID id, int branchId, int oldVersion, int newVersion, + long created, CDOListFeatureDelta delta); } diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractFeatureMapTableMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractFeatureMapTableMapping.java index 8ccc649b70..a2961d8c2c 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractFeatureMapTableMapping.java +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractFeatureMapTableMapping.java @@ -11,6 +11,7 @@ * Christopher Albert - 254455: [DB] Support FeatureMaps bug 254455 * Victor Roldan Betancort - Bug 283998: [DB] Chunk reading for multiple chunks fails * Stefan Winkler - Bug 285426: [DB] Implement user-defined typeMapping support + * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy */ package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal; @@ -28,7 +29,6 @@ import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy; import org.eclipse.emf.cdo.server.db.mapping.ITypeMapping; import org.eclipse.emf.cdo.server.internal.db.CDODBSchema; import org.eclipse.emf.cdo.server.internal.db.bundle.OM; -import org.eclipse.emf.cdo.spi.common.revision.InternalCDOList; import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevision; import org.eclipse.net4j.db.DBException; @@ -93,8 +93,6 @@ public abstract class AbstractFeatureMapTableMapping extends BasicAbstractListTa protected String sqlInsert; - private String sqlGetListLastIndex; - private List<DBType> dbTypes; public 
AbstractFeatureMapTableMapping(IMappingStrategy mappingStrategy, EClass eClass, EStructuralFeature feature) @@ -208,31 +206,6 @@ public abstract class AbstractFeatureMapTableMapping extends BasicAbstractListTa sqlOrderByIndex = " ORDER BY " + CDODBSchema.FEATUREMAP_IDX; //$NON-NLS-1$ - // ----------------- count list size -------------------------- - - builder = new StringBuilder("SELECT max("); - builder.append(CDODBSchema.FEATUREMAP_IDX); - builder.append(") FROM "); - builder.append(tableName); - builder.append(" WHERE "); - - for (int i = 0; i < fields.length; i++) - { - builder.append(fields[i].getName()); - if (i + 1 < fields.length) - { - // more to come - builder.append("=? AND "); - } - else - { - // last one - builder.append("=? "); - } - } - - sqlGetListLastIndex = builder.toString(); - // INSERT with dynamic field name // TODO: Better: universal INSERT-Statement, because of stmt caching! @@ -293,19 +266,10 @@ public abstract class AbstractFeatureMapTableMapping extends BasicAbstractListTa public void readValues(IDBStoreAccessor accessor, InternalCDORevision revision, int listChunk) { MoveableList<Object> list = revision.getList(getFeature()); - int listSize = -1; - - if (listChunk != CDORevision.UNCHUNKED) + if (listChunk == 0 || list.size() == 0) { - listSize = getListLastIndex(accessor, revision); - if (listSize == -1) - { - // list is empty - take shortcut - return; - } - - // subtract amount of items we are going to read now - listSize -= listChunk; + // nothing to read take shortcut + return; } if (TRACER.isEnabled()) @@ -330,6 +294,8 @@ public abstract class AbstractFeatureMapTableMapping extends BasicAbstractListTa } resultSet = pstmt.executeQuery(); + int currentIndex = 0; + while ((listChunk == CDORevision.UNCHUNKED || --listChunk >= 0) && resultSet.next()) { Long tag = resultSet.getLong(1); @@ -340,17 +306,7 @@ public abstract class AbstractFeatureMapTableMapping extends BasicAbstractListTa TRACER.format("Read value for index {0} from result 
set: {1}", list.size(), value); } - list.add(CDORevisionUtil.createFeatureMapEntry(getFeatureByTag(tag), value)); - } - - while (listSize-- >= 0) - { - if (TRACER.isEnabled()) - { - TRACER.format("Adding UNINITIALIZED for index {0} ", list.size()); - } - - list.add(InternalCDOList.UNINITIALIZED); + list.set(currentIndex++, CDORevisionUtil.createFeatureMapEntry(getFeatureByTag(tag), value)); } } catch (SQLException ex) @@ -382,56 +338,6 @@ public abstract class AbstractFeatureMapTableMapping extends BasicAbstractListTa typeMappings.put(tag, typeMapping); } - /** - * Return the last (maximum) list index. (euals to size-1) - * - * @param accessor - * the accessor to use - * @param revision - * the revision to which the feature list belongs - * @return the last index or <code>-1</code> if the list is empty. - */ - private int getListLastIndex(IDBStoreAccessor accessor, InternalCDORevision revision) - { - IPreparedStatementCache statementCache = accessor.getStatementCache(); - PreparedStatement pstmt = null; - ResultSet resultSet = null; - - try - { - pstmt = statementCache.getPreparedStatement(sqlGetListLastIndex, ReuseProbability.HIGH); - setKeyFields(pstmt, revision); - - resultSet = pstmt.executeQuery(); - if (!resultSet.next()) - { - if (TRACER.isEnabled()) - { - TRACER.trace("No last index found -> list is empty. 
"); - } - - return -1; - } - - int result = resultSet.getInt(1); - if (TRACER.isEnabled()) - { - TRACER.trace("Read list last index = " + result); - } - - return result; - } - catch (SQLException ex) - { - throw new DBException(ex); - } - finally - { - DBUtil.close(resultSet); - statementCache.releasePreparedStatement(pstmt); - } - } - public final void readChunks(IDBStoreChunkReader chunkReader, List<Chunk> chunks, String where) { if (TRACER.isEnabled()) diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractHorizontalClassMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractHorizontalClassMapping.java index cbd56da690..7bfc9433b9 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractHorizontalClassMapping.java +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractHorizontalClassMapping.java @@ -9,7 +9,7 @@ * Eike Stepper - initial API and implementation * Stefan Winkler - 271444: [DB] Multiple refactorings bug 271444 * Stefan Winkler - 249610: [DB] Support external references (Implementation) - * + * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy */ package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal; @@ -20,6 +20,7 @@ import org.eclipse.emf.cdo.common.branch.CDOBranchVersion; import org.eclipse.emf.cdo.common.id.CDOID; import org.eclipse.emf.cdo.common.id.CDOIDUtil; import org.eclipse.emf.cdo.common.model.CDOModelUtil; +import org.eclipse.emf.cdo.common.revision.CDOList; import org.eclipse.emf.cdo.common.revision.CDORevision; import org.eclipse.emf.cdo.common.revision.CDORevisionHandler; import org.eclipse.emf.cdo.common.revision.CDORevisionManager; @@ -40,6 +41,7 @@ import org.eclipse.emf.cdo.server.internal.db.CDODBSchema; import 
org.eclipse.emf.cdo.server.internal.db.DBStore; import org.eclipse.emf.cdo.server.internal.db.bundle.OM; import org.eclipse.emf.cdo.spi.common.commit.CDOChangeSetSegment; +import org.eclipse.emf.cdo.spi.common.revision.InternalCDOList; import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevision; import org.eclipse.net4j.db.DBException; @@ -57,6 +59,8 @@ import org.eclipse.emf.ecore.EReference; import org.eclipse.emf.ecore.EStructuralFeature; import org.eclipse.emf.ecore.util.FeatureMapUtil; +import org.eclipse.core.runtime.Assert; + import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; @@ -87,6 +91,8 @@ public abstract class AbstractHorizontalClassMapping implements IClassMapping private List<IListMapping> listMappings; + private Map<EStructuralFeature, String> listSizeFields; + private Map<EStructuralFeature, String> unsettableFields; private String sqlSelectForHandle; @@ -216,14 +222,20 @@ public abstract class AbstractHorizontalClassMapping implements IClassMapping { if (feature.isMany()) { + IListMapping mapping = null; if (FeatureMapUtil.isFeatureMap(feature)) { - listMappings.add(mappingStrategy.createFeatureMapMapping(eClass, feature)); + mapping = mappingStrategy.createFeatureMapMapping(eClass, feature); } else { - listMappings.add(mappingStrategy.createListMapping(eClass, feature)); + mapping = mappingStrategy.createListMapping(eClass, feature); } + + listMappings.add(mapping); + + // add field for list sizes + createListSizeField(feature); } } @@ -231,6 +243,22 @@ public abstract class AbstractHorizontalClassMapping implements IClassMapping } /** + * Create an integer field in the attribute tabel for the list size of the associated list mapping. 
+ */ + private void createListSizeField(EStructuralFeature feature) + { + if (listSizeFields == null) + { + listSizeFields = new LinkedHashMap<EStructuralFeature, String>(); + } + + String fieldName = mappingStrategy.getFieldName(feature); + table.addField(fieldName, DBType.INTEGER); + + listSizeFields.put(feature, fieldName); + } + + /** * Read the revision's values from the DB. * * @return <code>true</code> if the revision has been read successfully.<br> @@ -291,6 +319,29 @@ public abstract class AbstractHorizontalClassMapping implements IClassMapping mapping.readValueToRevision(resultSet, revision); } + if (listSizeFields != null) + { + for (Map.Entry<EStructuralFeature, String> listSizeEntry : listSizeFields.entrySet()) + { + EStructuralFeature feature = listSizeEntry.getKey(); + String fieldName = listSizeEntry.getValue(); + int size = resultSet.getInt(fieldName); + + // ensure the listSize (TODO: remove assertion) + CDOList list = revision.getList(feature, size); + + for (int i = 0; i < size; i++) + { + list.add(InternalCDOList.UNINITIALIZED); + } + + if (list.size() != size) + { + Assert.isTrue(false); + } + } + } + return true; } catch (SQLException ex) @@ -336,6 +387,11 @@ public abstract class AbstractHorizontalClassMapping implements IClassMapping return unsettableFields; } + protected final Map<EStructuralFeature, String> getListSizeFields() + { + return listSizeFields; + } + public final List<ITypeMapping> getValueMappings() { return valueMappings; diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractListTableMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractListTableMapping.java index b4bf2a6400..04b1c46de5 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractListTableMapping.java +++ 
b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AbstractListTableMapping.java @@ -9,6 +9,7 @@ * Eike Stepper - initial API and implementation * Stefan Winkler - Bug 271444: [DB] Multiple refactorings * Stefan Winkler - Bug 283998: [DB] Chunk reading for multiple chunks fails + * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy */ package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal; @@ -27,7 +28,6 @@ import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy; import org.eclipse.emf.cdo.server.db.mapping.ITypeMapping; import org.eclipse.emf.cdo.server.internal.db.CDODBSchema; import org.eclipse.emf.cdo.server.internal.db.bundle.OM; -import org.eclipse.emf.cdo.spi.common.revision.InternalCDOList; import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevision; import org.eclipse.net4j.db.DBException; @@ -78,8 +78,6 @@ public abstract class AbstractListTableMapping extends BasicAbstractListTableMap private String sqlInsertEntry; - private String sqlGetListLastIndex; - public AbstractListTableMapping(IMappingStrategy mappingStrategy, EClass eClass, EStructuralFeature feature) { super(mappingStrategy, eClass, feature); @@ -154,31 +152,6 @@ public abstract class AbstractListTableMapping extends BasicAbstractListTableMap sqlOrderByIndex = " ORDER BY " + CDODBSchema.LIST_IDX; //$NON-NLS-1$ - // ----------------- count list size -------------------------- - - builder = new StringBuilder("SELECT MAX("); //$NON-NLS-1$ - builder.append(CDODBSchema.LIST_IDX); - builder.append(") FROM "); //$NON-NLS-1$ - builder.append(tableName); - builder.append(" WHERE "); //$NON-NLS-1$ - - for (int i = 0; i < fields.length; i++) - { - builder.append(fields[i].getName()); - if (i + 1 < fields.length) - { - // more to come - builder.append("=? AND "); //$NON-NLS-1$ - } - else - { - // last one - builder.append("=? 
"); //$NON-NLS-1$ - } - } - - sqlGetListLastIndex = builder.toString(); - // ----------------- INSERT - reference entry ----------------- builder = new StringBuilder("INSERT INTO "); //$NON-NLS-1$ builder.append(tableName); @@ -216,19 +189,11 @@ public abstract class AbstractListTableMapping extends BasicAbstractListTableMap public void readValues(IDBStoreAccessor accessor, InternalCDORevision revision, int listChunk) { MoveableList<Object> list = revision.getList(getFeature()); - int listSize = -1; - if (listChunk != CDORevision.UNCHUNKED) + if (listChunk == 0 || list.size() == 0) { - listSize = getListLastIndex(accessor, revision); - if (listSize == -1) - { - // list is empty - take shortcut - return; - } - - // subtract amount of items we are going to read now - listSize -= listChunk; + // nothing to read take shortcut + return; } if (TRACER.isEnabled()) @@ -258,6 +223,8 @@ public abstract class AbstractListTableMapping extends BasicAbstractListTableMap } resultSet = pstmt.executeQuery(); + + int currentIndex = 0; while ((listChunk == CDORevision.UNCHUNKED || --listChunk >= 0) && resultSet.next()) { Object value = typeMapping.readValue(resultSet); @@ -266,17 +233,7 @@ public abstract class AbstractListTableMapping extends BasicAbstractListTableMap TRACER.format("Read value for index {0} from result set: {1}", list.size(), value); //$NON-NLS-1$ } - list.add(value); - } - - while (listSize-- >= 0) - { - if (TRACER.isEnabled()) - { - TRACER.format("Adding UNINITIALIZED for index {0} ", list.size()); //$NON-NLS-1$ - } - - list.add(InternalCDOList.UNINITIALIZED); + list.set(currentIndex++, value); } } catch (SQLException ex) @@ -296,71 +253,6 @@ public abstract class AbstractListTableMapping extends BasicAbstractListTableMap } } - /** - * Return the last (maximum) list index. 
(euals to size-1) - * - * @param accessor - * the accessor to use - * @param revision - * the revision to which the feature list belongs - * @return the last index or <code>-1</code> if the list is empty. - */ - private int getListLastIndex(IDBStoreAccessor accessor, InternalCDORevision revision) - { - IPreparedStatementCache statementCache = accessor.getStatementCache(); - PreparedStatement pstmt = null; - ResultSet resultSet = null; - - try - { - pstmt = statementCache.getPreparedStatement(sqlGetListLastIndex, ReuseProbability.HIGH); - setKeyFields(pstmt, revision); - - if (TRACER.isEnabled()) - { - TRACER.trace(pstmt.toString()); - } - - resultSet = pstmt.executeQuery(); - if (!resultSet.next()) - { - if (TRACER.isEnabled()) - { - TRACER.trace("No last index found -> list is empty. "); //$NON-NLS-1$ - } - - return -1; - } - - int result = resultSet.getInt(1); - if (resultSet.wasNull()) - { - if (TRACER.isEnabled()) - { - TRACER.trace("No last index found -> list is empty. NULL "); //$NON-NLS-1$ - } - - return -1; - } - - if (TRACER.isEnabled()) - { - TRACER.trace("Read list last index = " + result); //$NON-NLS-1$ - } - - return result; - } - catch (SQLException ex) - { - throw new DBException(ex); - } - finally - { - DBUtil.close(resultSet); - statementCache.releasePreparedStatement(pstmt); - } - } - public final void readChunks(IDBStoreChunkReader chunkReader, List<Chunk> chunks, String where) { if (TRACER.isEnabled()) @@ -494,7 +386,7 @@ public abstract class AbstractListTableMapping extends BasicAbstractListTableMap { String tableName = getTable().getName(); String listJoin = getMappingStrategy().getListJoin("a_t", "l_t"); - + StringBuilder builder = new StringBuilder(); builder.append("SELECT l_t."); //$NON-NLS-1$ builder.append(CDODBSchema.LIST_REVISION_ID); @@ -514,10 +406,10 @@ public abstract class AbstractListTableMapping extends BasicAbstractListTableMap builder.append(" IN "); //$NON-NLS-1$ builder.append(idString); String sql = builder.toString(); - 
+ ResultSet resultSet = null; Statement stmt = null; - + try { stmt = accessor.getConnection().createStatement(); @@ -525,7 +417,7 @@ public abstract class AbstractListTableMapping extends BasicAbstractListTableMap { TRACER.format("Query XRefs (list): {0}", sql); } - + resultSet = stmt.executeQuery(sql); while (resultSet.next()) { @@ -534,24 +426,24 @@ public abstract class AbstractListTableMapping extends BasicAbstractListTableMap idLong = resultSet.getLong(2); CDOID targetId = CDOIDUtil.createLong(idLong); int idx = resultSet.getInt(3); - + boolean more = context.addXRef(targetId, srcId, (EReference)getFeature(), idx); if (TRACER.isEnabled()) { TRACER.format(" add XRef to context: src={0}, tgt={1}, idx={2}", srcId, targetId, idx); } - + if (!more) { if (TRACER.isEnabled()) { TRACER.format(" result limit reached. Ignoring further results."); } - + return false; } } - + return true; } catch (SQLException ex) diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditFeatureMapTableMappingWithRanges.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditFeatureMapTableMappingWithRanges.java index 5fd72fe46a..a8f8ada17f 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditFeatureMapTableMappingWithRanges.java +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditFeatureMapTableMappingWithRanges.java @@ -13,6 +13,7 @@ * Lothar Werzinger - Bug 296440: [DB] Change RDB schema to improve scalability of to-many references in audit mode * Stefan Winkler - cleanup, merge and maintenance * Stefan Winkler - Bug 285426: [DB] Implement user-defined typeMapping support + * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy */ package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal; @@ -45,7 +46,6 @@ 
import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy; import org.eclipse.emf.cdo.server.db.mapping.ITypeMapping; import org.eclipse.emf.cdo.server.internal.db.CDODBSchema; import org.eclipse.emf.cdo.server.internal.db.bundle.OM; -import org.eclipse.emf.cdo.spi.common.revision.InternalCDOList; import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevision; import org.eclipse.net4j.db.DBException; @@ -124,8 +124,6 @@ public class AuditFeatureMapTableMappingWithRanges extends BasicAbstractListTabl protected String sqlInsert; - private String sqlGetListLastIndex; - private List<DBType> dbTypes; private String sqlRemoveEntry; @@ -235,21 +233,6 @@ public class AuditFeatureMapTableMappingWithRanges extends BasicAbstractListTabl sqlOrderByIndex = " ORDER BY " + CDODBSchema.FEATUREMAP_IDX; //$NON-NLS-1$ - // ----------------- count list size -------------------------- - - builder = new StringBuilder("SELECT count(1) FROM "); //$NON-NLS-1$ - builder.append(tableName); - builder.append(" WHERE "); //$NON-NLS-1$ - builder.append(CDODBSchema.FEATUREMAP_REVISION_ID); - builder.append("=? AND "); //$NON-NLS-1$ - builder.append(CDODBSchema.FEATUREMAP_VERSION_ADDED); - builder.append("<=? 
AND ("); //$NON-NLS-1$ - builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED); - builder.append(" IS NULL OR "); //$NON-NLS-1$ - builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED); - builder.append(">?)"); //$NON-NLS-1$ - sqlGetListLastIndex = builder.toString(); - // ----------------- INSERT - prefix ----------------- builder = new StringBuilder("INSERT INTO "); //$NON-NLS-1$ builder.append(tableName); @@ -403,19 +386,11 @@ public class AuditFeatureMapTableMappingWithRanges extends BasicAbstractListTabl public void readValues(IDBStoreAccessor accessor, InternalCDORevision revision, int listChunk) { MoveableList<Object> list = revision.getList(getFeature()); - int listSize = -1; - if (listChunk != CDORevision.UNCHUNKED) + if (listChunk == 0 || list.size() == 0) { - listSize = getListLastIndex(accessor, revision); - if (listSize == -1) - { - // list is empty - take shortcut - return; - } - - // subtract amount of items we are going to read now - listSize -= listChunk; + // nothing to read take shortcut + return; } if (TRACER.isEnabled()) @@ -444,6 +419,8 @@ public class AuditFeatureMapTableMappingWithRanges extends BasicAbstractListTabl } resultSet = pstmt.executeQuery(); + + int currentIndex = 0; while ((listChunk == CDORevision.UNCHUNKED || --listChunk >= 0) && resultSet.next()) { Long tag = resultSet.getLong(1); @@ -454,17 +431,7 @@ public class AuditFeatureMapTableMappingWithRanges extends BasicAbstractListTabl TRACER.format("Read value for index {0} from result set: {1}", list.size(), value); //$NON-NLS-1$ } - list.add(CDORevisionUtil.createFeatureMapEntry(getFeatureByTag(tag), value)); - } - - while (listSize-- >= 0) - { - if (TRACER.isEnabled()) - { - TRACER.format("Adding UNINITIALIZED for index {0} ", list.size()); //$NON-NLS-1$ - } - - list.add(InternalCDOList.UNINITIALIZED); + list.set(currentIndex++, CDORevisionUtil.createFeatureMapEntry(getFeatureByTag(tag), value)); } } catch (SQLException ex) @@ -496,54 +463,6 @@ public class 
AuditFeatureMapTableMappingWithRanges extends BasicAbstractListTabl typeMappings.put(tag, typeMapping); } - /** - * Return the last (maximum) list index. (euals to size-1) - * - * @param accessor - * the accessor to use - * @param revision - * the revision to which the feature list belongs - * @return the last index or <code>-1</code> if the list is empty. - */ - private int getListLastIndex(IDBStoreAccessor accessor, InternalCDORevision revision) - { - IPreparedStatementCache statementCache = accessor.getStatementCache(); - PreparedStatement pstmt = null; - ResultSet resultSet = null; - - try - { - pstmt = statementCache.getPreparedStatement(sqlGetListLastIndex, ReuseProbability.HIGH); - - pstmt.setLong(1, CDOIDUtil.getLong(revision.getID())); - pstmt.setInt(2, revision.getVersion()); - pstmt.setInt(3, revision.getVersion()); - - resultSet = pstmt.executeQuery(); - if (!resultSet.next()) - { - throw new DBException("Count expects exactly one result"); - } - - int result = resultSet.getInt(1) - 1; - if (TRACER.isEnabled()) - { - TRACER.trace("Read list last index = " + result); //$NON-NLS-1$ - } - - return result; - } - catch (SQLException ex) - { - throw new DBException(ex); - } - finally - { - DBUtil.close(resultSet); - statementCache.releasePreparedStatement(pstmt); - } - } - public final void readChunks(IDBStoreChunkReader chunkReader, List<Chunk> chunks, String where) { if (TRACER.isEnabled()) @@ -783,8 +702,8 @@ public class AuditFeatureMapTableMappingWithRanges extends BasicAbstractListTabl clearList(accessor, id, revision.getVersion(), FINAL_VERSION); } - public void processDelta(final IDBStoreAccessor accessor, final CDOID id, int oldVersion, final int newVersion, - long created, CDOListFeatureDelta delta) + public void processDelta(final IDBStoreAccessor accessor, final CDOID id, final int branchId, int oldVersion, + final int newVersion, long created, CDOListFeatureDelta delta) { IRepository repo = accessor.getStore().getRepository(); InternalCDORevision 
originalRevision = (InternalCDORevision)repo.getRevisionManager().getRevision(id, diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditListTableMappingWithRanges.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditListTableMappingWithRanges.java index 6587358d9e..1ecd242ac2 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditListTableMappingWithRanges.java +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/AuditListTableMappingWithRanges.java @@ -11,6 +11,7 @@ * Eike Stepper - initial API and implementation * Lothar Werzinger - Bug 296440: [DB] Change RDB schema to improve scalability of to-many references in audit mode * Stefan Winkler - cleanup, merge and maintenance + * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy */ package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal; @@ -42,7 +43,6 @@ import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy; import org.eclipse.emf.cdo.server.db.mapping.ITypeMapping; import org.eclipse.emf.cdo.server.internal.db.CDODBSchema; import org.eclipse.emf.cdo.server.internal.db.bundle.OM; -import org.eclipse.emf.cdo.spi.common.revision.InternalCDOList; import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevision; import org.eclipse.net4j.db.DBException; @@ -56,11 +56,13 @@ import org.eclipse.net4j.util.collection.MoveableList; import org.eclipse.net4j.util.om.trace.ContextTracer; import org.eclipse.emf.ecore.EClass; +import org.eclipse.emf.ecore.EReference; import org.eclipse.emf.ecore.EStructuralFeature; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; +import java.sql.Statement; import java.util.Arrays; import java.util.Collection; import java.util.List; @@ -110,8 +112,6 @@ public class 
AuditListTableMappingWithRanges extends BasicAbstractListTableMappi private String sqlGetValue; - private String sqlGetListLastIndex; - private String sqlClearList; private String sqlDeleteList; @@ -172,20 +172,6 @@ public class AuditListTableMappingWithRanges extends BasicAbstractListTableMappi sqlOrderByIndex = " ORDER BY " + CDODBSchema.LIST_IDX; //$NON-NLS-1$ - // ----------------- count list size -------------------------- - builder = new StringBuilder("SELECT count(1) FROM "); //$NON-NLS-1$ - builder.append(tableName); - builder.append(" WHERE "); //$NON-NLS-1$ - builder.append(CDODBSchema.LIST_REVISION_ID); - builder.append("=? AND "); //$NON-NLS-1$ - builder.append(CDODBSchema.LIST_REVISION_VERSION_ADDED); - builder.append("<=? AND ("); //$NON-NLS-1$ - builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED); - builder.append(" IS NULL OR "); //$NON-NLS-1$ - builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED); - builder.append(">?)"); //$NON-NLS-1$ - sqlGetListLastIndex = builder.toString(); - // ----------------- insert entry ----------------- builder = new StringBuilder("INSERT INTO "); //$NON-NLS-1$ builder.append(tableName); @@ -296,19 +282,10 @@ public class AuditListTableMappingWithRanges extends BasicAbstractListTableMappi public void readValues(IDBStoreAccessor accessor, InternalCDORevision revision, int listChunk) { MoveableList<Object> list = revision.getList(getFeature()); - int listSize = -1; - - if (listChunk != CDORevision.UNCHUNKED) + if (listChunk == 0 || list.size() == 0) { - listSize = getListLastIndex(accessor, revision.getID(), revision.getVersion()); - if (listSize == -1) - { - // list is empty - take shortcut - return; - } - - // subtract amount of items we are going to read now - listSize -= listChunk; + // nothing to read take shortcut + return; } if (TRACER.isEnabled()) @@ -335,6 +312,8 @@ public class AuditListTableMappingWithRanges extends BasicAbstractListTableMappi } resultSet = pstmt.executeQuery(); + + int currentIndex 
= 0; while ((listChunk == CDORevision.UNCHUNKED || --listChunk >= 0) && resultSet.next()) { Object value = typeMapping.readValue(resultSet); @@ -343,17 +322,7 @@ public class AuditListTableMappingWithRanges extends BasicAbstractListTableMappi TRACER.format("Read value for index {0} from result set: {1}", list.size(), value); //$NON-NLS-1$ } - list.add(value); - } - - while (listSize-- >= 0) - { - if (TRACER.isEnabled()) - { - TRACER.format("Adding UNINITIALIZED for index {0} ", list.size()); //$NON-NLS-1$ - } - - list.add(InternalCDOList.UNINITIALIZED); + list.set(currentIndex++, value); } } catch (SQLException ex) @@ -373,55 +342,6 @@ public class AuditListTableMappingWithRanges extends BasicAbstractListTableMappi } } - /** - * Return the last (maximum) list index. (equals to size-1) - * - * @param accessor - * the accessor to use - * @param id - * the CDOID of the revision to which the getFeature() list belongs - * @param version - * the revision to which the getFeature() list belongs - * @return the last index or <code>-1</code> if the list is empty. 
- */ - private int getListLastIndex(IDBStoreAccessor accessor, CDOID id, int version) - { - IPreparedStatementCache statementCache = accessor.getStatementCache(); - PreparedStatement pstmt = null; - ResultSet resultSet = null; - - try - { - pstmt = statementCache.getPreparedStatement(sqlGetListLastIndex, ReuseProbability.HIGH); - pstmt.setLong(1, CDOIDUtil.getLong(id)); - pstmt.setInt(2, version); - pstmt.setInt(3, version); - - resultSet = pstmt.executeQuery(); - if (!resultSet.next()) - { - throw new DBException("Count expects exactly one result"); - } - - int result = resultSet.getInt(1) - 1; - if (TRACER.isEnabled()) - { - TRACER.trace("Read list last index = " + result); //$NON-NLS-1$ - } - - return result; - } - catch (SQLException ex) - { - throw new DBException(ex); - } - finally - { - DBUtil.close(resultSet); - statementCache.releasePreparedStatement(pstmt); - } - } - public final void readChunks(IDBStoreChunkReader chunkReader, List<Chunk> chunks, String where) { if (TRACER.isEnabled()) @@ -606,8 +526,8 @@ public class AuditListTableMappingWithRanges extends BasicAbstractListTableMappi clearList(accessor, id, revision.getVersion(), FINAL_VERSION); } - public void processDelta(final IDBStoreAccessor accessor, final CDOID id, int oldVersion, final int newVersion, - long created, CDOListFeatureDelta delta) + public void processDelta(final IDBStoreAccessor accessor, final CDOID id, final int branchId, int oldVersion, + final int newVersion, long created, CDOListFeatureDelta delta) { IRepository repo = accessor.getStore().getRepository(); InternalCDORevision originalRevision = (InternalCDORevision)repo.getRevisionManager().getRevision(id, @@ -1080,6 +1000,77 @@ public class AuditListTableMappingWithRanges extends BasicAbstractListTableMappi public final boolean queryXRefs(IDBStoreAccessor accessor, String mainTableName, String mainTableWhere, QueryXRefsContext context, String idString) { - throw new UnsupportedOperationException("This mapping strategy does not 
support cross-reference querying."); + + String tableName = getTable().getName(); + String listJoin = getMappingStrategy().getListJoin("a_t", "l_t"); + + StringBuilder builder = new StringBuilder(); + builder.append("SELECT l_t."); //$NON-NLS-1$ + builder.append(CDODBSchema.LIST_REVISION_ID); + builder.append(", l_t."); //$NON-NLS-1$ + builder.append(CDODBSchema.LIST_VALUE); + builder.append(", l_t."); //$NON-NLS-1$ + builder.append(CDODBSchema.LIST_IDX); + builder.append(" FROM "); //$NON-NLS-1$ + builder.append(tableName); + builder.append(" AS l_t, ");//$NON-NLS-1$ + builder.append(mainTableName); + builder.append(" AS a_t WHERE ");//$NON-NLS-1$ + builder.append("a_t." + mainTableWhere);//$NON-NLS-1$ + builder.append(listJoin); + builder.append(" AND "); //$NON-NLS-1$ + builder.append(CDODBSchema.LIST_VALUE); + builder.append(" IN "); //$NON-NLS-1$ + builder.append(idString); + String sql = builder.toString(); + + ResultSet resultSet = null; + Statement stmt = null; + + try + { + stmt = accessor.getConnection().createStatement(); + if (TRACER.isEnabled()) + { + TRACER.format("Query XRefs (list): {0}", sql); + } + + resultSet = stmt.executeQuery(sql); + while (resultSet.next()) + { + long idLong = resultSet.getLong(1); + CDOID srcId = CDOIDUtil.createLong(idLong); + idLong = resultSet.getLong(2); + CDOID targetId = CDOIDUtil.createLong(idLong); + int idx = resultSet.getInt(3); + + boolean more = context.addXRef(targetId, srcId, (EReference)getFeature(), idx); + if (TRACER.isEnabled()) + { + TRACER.format(" add XRef to context: src={0}, tgt={1}, idx={2}", srcId, targetId, idx); + } + + if (!more) + { + if (TRACER.isEnabled()) + { + TRACER.format(" result limit reached. 
Ignoring further results."); + } + + return false; + } + } + + return true; + } + catch (SQLException ex) + { + throw new DBException(ex); + } + finally + { + DBUtil.close(resultSet); + DBUtil.close(stmt); + } } } diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/BranchingFeatureMapTableMappingWithRanges.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/BranchingFeatureMapTableMappingWithRanges.java new file mode 100644 index 0000000000..0cc0a1301b --- /dev/null +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/BranchingFeatureMapTableMappingWithRanges.java @@ -0,0 +1,1483 @@ +/**
+ * Copyright (c) 2004 - 2010 Eike Stepper (Berlin, Germany) and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Stefan Winkler - initial API and implementation taken from AuditFeatureMapTableMappingWithRanges
+ * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy
+ */
+package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal;
+
+import org.eclipse.emf.cdo.common.branch.CDOBranchPoint;
+import org.eclipse.emf.cdo.common.id.CDOID;
+import org.eclipse.emf.cdo.common.id.CDOIDUtil;
+import org.eclipse.emf.cdo.common.revision.CDOList;
+import org.eclipse.emf.cdo.common.revision.CDORevision;
+import org.eclipse.emf.cdo.common.revision.CDORevisionUtil;
+import org.eclipse.emf.cdo.common.revision.delta.CDOAddFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOClearFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOContainerFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOFeatureDeltaVisitor;
+import org.eclipse.emf.cdo.common.revision.delta.CDOListFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOMoveFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDORemoveFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOSetFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOUnsetFeatureDelta;
+import org.eclipse.emf.cdo.server.IStoreAccessor.QueryXRefsContext;
+import org.eclipse.emf.cdo.server.IStoreChunkReader;
+import org.eclipse.emf.cdo.server.IStoreChunkReader.Chunk;
+import org.eclipse.emf.cdo.server.db.CDODBUtil;
+import org.eclipse.emf.cdo.server.db.IDBStoreAccessor;
+import org.eclipse.emf.cdo.server.db.IDBStoreChunkReader;
+import org.eclipse.emf.cdo.server.db.IPreparedStatementCache;
+import org.eclipse.emf.cdo.server.db.IPreparedStatementCache.ReuseProbability;
+import org.eclipse.emf.cdo.server.db.mapping.IListMappingDeltaSupport;
+import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy;
+import org.eclipse.emf.cdo.server.db.mapping.ITypeMapping;
+import org.eclipse.emf.cdo.server.internal.db.CDODBSchema;
+import org.eclipse.emf.cdo.server.internal.db.bundle.OM;
+import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevision;
+
+import org.eclipse.net4j.db.DBException;
+import org.eclipse.net4j.db.DBType;
+import org.eclipse.net4j.db.DBUtil;
+import org.eclipse.net4j.db.ddl.IDBField;
+import org.eclipse.net4j.db.ddl.IDBIndex.Type;
+import org.eclipse.net4j.db.ddl.IDBTable;
+import org.eclipse.net4j.util.ImplementationError;
+import org.eclipse.net4j.util.collection.MoveableList;
+import org.eclipse.net4j.util.om.trace.ContextTracer;
+
+import org.eclipse.emf.ecore.EClass;
+import org.eclipse.emf.ecore.EStructuralFeature;
+import org.eclipse.emf.ecore.util.FeatureMap;
+
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
/**
 * This is a featuremap-table mapping for branching mode. It is optimized for frequent insert operations at the list's
 * end, which cause just one DB row to be changed. This is achieved by introducing a version range (columns
 * {@link CDODBSchema#FEATUREMAP_VERSION_ADDED cdo_version_added} and
 * {@link CDODBSchema#FEATUREMAP_VERSION_REMOVED cdo_version_removed}) which records for which revisions a particular
 * entry existed. Also, this mapping is mainly optimized for potentially very large lists: the need for having the
 * complete list in memory to do in-the-middle moves and inserts is traded in for a few more DB access
 * operations.
 *
 * @author Eike Stepper
 * @author Stefan Winkler
 * @author Lothar Werzinger
 * @since 4.0
 */
+public class BranchingFeatureMapTableMappingWithRanges extends BasicAbstractListTableMapping implements
+ IListMappingDeltaSupport
+{
  private static final ContextTracer TRACER = new ContextTracer(OM.DEBUG,
      BranchingFeatureMapTableMappingWithRanges.class);

  /**
   * Used to clean up lists for detached objects: entries of a detached object get this value as their
   * "version removed" so they never match any real revision's version range again.
   */
  private static final int FINAL_VERSION = Integer.MAX_VALUE;

  /**
   * The table of this mapping.
   */
  private IDBTable table;

  /**
   * The tags mapped to column names. Maps a feature-map tag to the name of the value column that stores
   * entries of that feature's DB type.
   */
  private HashMap<Long, String> tagMap;

  /**
   * The names of all value columns (one per DB type, see initTable()).
   */
  private List<String> columnNames;

  /**
   * The type mappings for the value fields, keyed by feature-map tag.
   */
  private Map<Long, ITypeMapping> typeMappings;

  // --------- SQL strings - see initSQLStrings() -----------------

  // SELECT of idx, tag and all value columns for a given id, branch and version range
  private String sqlSelectChunksPrefix;

  // ORDER BY clause appended to sqlSelectChunksPrefix for index-ordered reads
  private String sqlOrderByIndex;

  // INSERT of a single feature-map entry (id, branch, version range, value columns, idx, tag)
  protected String sqlInsert;

  // the DB types for which value columns exist (see initDBTypes())
  private List<DBType> dbTypes;

  // UPDATE that closes an entry's version range by setting "version removed"
  private String sqlRemoveEntry;

  // DELETE of an entry that was added in the same (still uncommitted) version
  private String sqlDeleteEntry;

  // UPDATE of an entry's list index (used for moves/shifts)
  private String sqlUpdateIndex;

  // SELECT of the tag and value columns of the current (not yet removed) entry at a given index
  private String sqlGetValue;

  // UPDATE that closes the version range of all current entries of a list at once
  private String sqlClearList;
+
  /**
   * Creates the feature-map table mapping for the given feature of the given class.
   * <p>
   * The initialization order matters: the DB types are needed to create the value columns of the table, and the table
   * (and its name) is needed to build the SQL strings.
   */
  public BranchingFeatureMapTableMappingWithRanges(IMappingStrategy mappingStrategy, EClass eClass,
      EStructuralFeature feature)
  {
    super(mappingStrategy, eClass, feature);
    initDBTypes();
    initTable();
    initSQLStrings();
  }
+
  /**
   * Initializes {@link #dbTypes} with the registry's default set of feature-map DB types. One value column per DB type
   * is later created in {@link #initTable()}.
   */
  private void initDBTypes()
  {
    // TODO add annotation processing here ...
    ITypeMapping.Registry registry = ITypeMapping.Registry.INSTANCE;
    dbTypes = new ArrayList<DBType>(registry.getDefaultFeatureMapDBTypes());
  }
+
+ private void initTable()
+ {
+ String tableName = getMappingStrategy().getTableName(getContainingClass(), getFeature());
+ table = getMappingStrategy().getStore().getDBSchema().addTable(tableName);
+
+ // add fields for CDOID
+ IDBField idField = table.addField(CDODBSchema.FEATUREMAP_REVISION_ID, DBType.INTEGER);
+
+ IDBField branchField = table.addField(CDODBSchema.LIST_REVISION_BRANCH, DBType.INTEGER);
+
+ // add fields for version range
+ IDBField versionAddedField = table.addField(CDODBSchema.FEATUREMAP_VERSION_ADDED, DBType.INTEGER);
+ IDBField versionRemovedField = table.addField(CDODBSchema.FEATUREMAP_VERSION_REMOVED, DBType.INTEGER);
+
+ // add field for list index
+ IDBField idxField = table.addField(CDODBSchema.FEATUREMAP_IDX, DBType.INTEGER);
+
+ // add field for FeatureMap tag (MetaID for Feature in CDO registry)
+ IDBField tagField = table.addField(CDODBSchema.FEATUREMAP_TAG, DBType.INTEGER);
+
+ tagMap = new HashMap<Long, String>();
+ typeMappings = new HashMap<Long, ITypeMapping>();
+ columnNames = new ArrayList<String>();
+
+ // create columns for all DBTypes
+ for (DBType type : getDBTypes())
+ {
+ String column = CDODBSchema.FEATUREMAP_VALUE + "_" + type.name();
+ table.addField(column, type);
+ columnNames.add(column);
+ }
+
+ table.addIndex(Type.NON_UNIQUE, idField);
+ table.addIndex(Type.NON_UNIQUE, branchField);
+ table.addIndex(Type.NON_UNIQUE, versionAddedField);
+ table.addIndex(Type.NON_UNIQUE, versionRemovedField);
+ table.addIndex(Type.NON_UNIQUE, idxField);
+ table.addIndex(Type.NON_UNIQUE, tagField);
+ }
+
+ public Collection<IDBTable> getDBTables()
+ {
+ return Arrays.asList(table);
+ }
+
+ private void initSQLStrings()
+ {
+ String tableName = getTable().getName();
+
+ // ---------------- SELECT to read chunks ----------------------------
+ StringBuilder builder = new StringBuilder();
+ builder.append("SELECT "); //$NON-NLS-1$
+
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append(", "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_TAG);
+ builder.append(", "); //$NON-NLS-1$
+
+ Iterator<String> iter = columnNames.iterator();
+ while (iter.hasNext())
+ {
+ builder.append(iter.next());
+ if (iter.hasNext())
+ {
+ builder.append(", "); //$NON-NLS-1$
+ }
+ }
+
+ builder.append(" FROM "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_BRANCH);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_ADDED);
+ builder.append("<=? AND ("); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" IS NULL OR "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(">?)"); //$NON-NLS-1$
+ sqlSelectChunksPrefix = builder.toString();
+
+ sqlOrderByIndex = " ORDER BY " + CDODBSchema.FEATUREMAP_IDX; //$NON-NLS-1$
+
+ // ----------------- INSERT - prefix -----------------
+ builder = new StringBuilder("INSERT INTO "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append("("); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append(","); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_BRANCH);
+ builder.append(","); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_ADDED);
+ builder.append(","); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(","); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append(","); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VALUE);
+
+ for (int i = 0; i < columnNames.size(); i++)
+ {
+ builder.append(columnNames.get(i));
+ builder.append(", "); //$NON-NLS-1$
+ }
+
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append(", "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_TAG);
+ builder.append(") VALUES (?, ?, ?, ?, ?, ?, "); //$NON-NLS-1$
+ for (int i = 0; i < columnNames.size(); i++)
+ {
+ builder.append("?, "); //$NON-NLS-1$
+ }
+
+ builder.append("?, ?)"); //$NON-NLS-1$
+ sqlInsert = builder.toString();
+
+ // ----------------- remove current entry -----------------
+ builder = new StringBuilder("UPDATE "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" SET "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append("=? "); //$NON-NLS-1$
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_BRANCH);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlRemoveEntry = builder.toString();
+
+ // ----------------- delete temporary entry -----------------
+ builder = new StringBuilder("DELETE FROM "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_BRANCH);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_ADDED);
+ builder.append("=?"); //$NON-NLS-1$
+ sqlDeleteEntry = builder.toString();
+
+ // ----------------- update index -----------------
+ builder = new StringBuilder("UPDATE "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" SET "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append("=? WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_BRANCH);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_ADDED);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append("=?"); //$NON-NLS-1$
+ sqlUpdateIndex = builder.toString();
+
+ // ----------------- get current value -----------------
+ builder = new StringBuilder("SELECT "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_TAG);
+ builder.append(", "); //$NON-NLS-1$
+
+ iter = columnNames.iterator();
+ while (iter.hasNext())
+ {
+ builder.append(iter.next());
+ if (iter.hasNext())
+ {
+ builder.append(", "); //$NON-NLS-1$
+ }
+ }
+
+ builder.append(" FROM "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_BRANCH);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_IDX);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlGetValue = builder.toString();
+
+ // ----------- clear list items -------------------------
+ builder = new StringBuilder("UPDATE "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" SET "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append("=? "); //$NON-NLS-1$
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_REVISION_ID);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_BRANCH);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.FEATUREMAP_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlClearList = builder.toString();
+ }
+
  /**
   * Returns the DB types for which value columns exist in this mapping's table.
   */
  protected List<DBType> getDBTypes()
  {
    return dbTypes;
  }
+
  /**
   * Returns the DB table of this mapping.
   */
  protected final IDBTable getTable()
  {
    return table;
  }
+
  /**
   * Returns the names of the value columns (one per DB type).
   */
  protected final List<String> getColumnNames()
  {
    return columnNames;
  }
+
  /**
   * Returns the type mappings for the value fields, keyed by feature-map tag.
   */
  protected final Map<Long, ITypeMapping> getTypeMappings()
  {
    return typeMappings;
  }
+
  /**
   * Returns the map from feature-map tag to the name of the value column used for that tag.
   */
  protected final Map<Long, String> getTagMap()
  {
    return tagMap;
  }
+
  /**
   * Reads the values of this feature map into the given revision's list, up to {@code listChunk} elements (or all of
   * them if {@code listChunk} is {@link CDORevision#UNCHUNKED}).
   * <p>
   * Only entries changed in the revision's own branch are stored in this table, so the result set may have gaps in the
   * index sequence. Every gap (and any tail that the result set does not cover) is scheduled as a ranged chunk on a
   * chunk reader for the base revision and filled in afterwards.
   * <p>
   * Note: this method assumes the list already has its final size (entries are overwritten via {@code list.set}, not
   * appended).
   */
  public void readValues(IDBStoreAccessor accessor, InternalCDORevision revision, int listChunk)
  {
    MoveableList<Object> list = revision.getList(getFeature());
    int valuesToRead = list.size();

    // never read more than the requested chunk size
    if (listChunk != CDORevision.UNCHUNKED && listChunk < valuesToRead)
    {
      valuesToRead = listChunk;
    }

    if (valuesToRead == 0)
    {
      // nothing to read take shortcut
      return;
    }

    if (TRACER.isEnabled())
    {
      TRACER.format("Reading list values for feature {0}.{1} of {2}", getContainingClass().getName(), getFeature() //$NON-NLS-1$
          .getName(), revision);
    }

    IPreparedStatementCache statementCache = accessor.getStatementCache();
    PreparedStatement pstmt = null;
    ResultSet resultSet = null;

    // lazily created - only needed if some indexes are missing in this branch's table rows
    IStoreChunkReader baseReader = null;
    try
    {
      String sql = sqlSelectChunksPrefix + sqlOrderByIndex;

      CDOID id = revision.getID();
      int branchID = revision.getBranch().getID();

      // parameters: id, branch, and the revision's version twice (for the version-range condition)
      pstmt = statementCache.getPreparedStatement(sql, ReuseProbability.HIGH);
      pstmt.setLong(1, CDOIDUtil.getLong(id));
      pstmt.setInt(2, branchID);
      pstmt.setInt(3, revision.getVersion());
      pstmt.setInt(4, revision.getVersion());

      // NOTE(review): maxRows stays set on the cached statement until the next caller overwrites it - confirm
      // that all users of this SQL string set it explicitly
      pstmt.setMaxRows(valuesToRead); // optimization - don't read unneeded rows.

      resultSet = pstmt.executeQuery();

      int currentIndex = 0;

      while (valuesToRead > 0 && resultSet.next())
      {
        // result set columns: (1) index, (2) tag, then the value columns
        int index = resultSet.getInt(1);
        if (index > currentIndex)
        {
          // gap in the index sequence: those values live in the base revision
          if (baseReader == null)
          {
            baseReader = createBaseChunkReader(accessor, id, branchID);
          }

          baseReader.addRangedChunk(currentIndex, index);
          if (TRACER.isEnabled())
          {
            TRACER.format("Scheduling range {0}-{1} to be read from base revision", currentIndex, index); //$NON-NLS-1$
          }

          // the gap still counts against the read budget
          valuesToRead -= index - currentIndex;
          currentIndex = index;
        }

        Long tag = resultSet.getLong(2);
        Object value = getTypeMapping(tag).readValue(resultSet);
        if (TRACER.isEnabled())
        {
          TRACER.format("Read value for index {0} from result set: {1}", currentIndex, value); //$NON-NLS-1$
        }

        list.set(currentIndex++, CDORevisionUtil.createFeatureMapEntry(getFeatureByTag(tag), value));
        valuesToRead--;
      }

      if (valuesToRead > 0)
      {
        // the tail of the requested range was not covered by this branch's rows - read it from the base revision
        if (baseReader == null)
        {
          baseReader = createBaseChunkReader(accessor, id, branchID);
        }

        baseReader.addRangedChunk(currentIndex, currentIndex + valuesToRead);
      }
    }
    catch (SQLException ex)
    {
      throw new DBException(ex);
    }
    finally
    {
      DBUtil.close(resultSet);
      statementCache.releasePreparedStatement(pstmt);
    }

    if (baseReader != null)
    {
      if (TRACER.isEnabled())
      {
        TRACER.format("Reading base revision chunks for featureMap {0}.{1} of {2} from base revision {3}", //$NON-NLS-1$
            getContainingClass().getName(), getFeature().getName(), revision, baseReader.getRevision());
      }

      // copy the base revision's values into the gaps scheduled above
      List<Chunk> baseChunks = baseReader.executeRead();
      for (Chunk chunk : baseChunks)
      {
        int startIndex = chunk.getStartIndex();
        for (int i = 0; i < chunk.size(); i++)
        {
          list.set(startIndex + i, chunk.get(i));
        }
      }
    }

    if (TRACER.isEnabled())
    {
      TRACER.format("Reading list values done for feature {0}.{1} of {2}", getContainingClass().getName(), //$NON-NLS-1$
          getFeature().getName(), revision);
    }
  }
+
+  /**
+   * Lazily registers the column and type mapping for a feature-map entry feature
+   * that has not been seen before. Called on demand from {@code getColumnName} and
+   * {@code getTypeMapping} when a tag is first encountered.
+   *
+   * @param tag
+   *          the entry feature's MetaID in CDO
+   */
+  private void addFeature(Long tag)
+  {
+    EStructuralFeature modelFeature = getFeatureByTag(tag);
+
+    ITypeMapping typeMapping = getMappingStrategy().createValueMapping(modelFeature);
+    // one value column per DB type; all features sharing a DB type share the column
+    String column = CDODBSchema.FEATUREMAP_VALUE + "_" + typeMapping.getDBType(); //$NON-NLS-1$
+
+    tagMap.put(tag, column);
+    typeMapping.setDBField(table, column);
+    typeMappings.put(tag, typeMapping);
+  }
+
+  /**
+   * Reads the requested chunks of a feature map list. Values present in the current
+   * branch are read directly from the result set; gaps (indexes with no row in this
+   * branch) are collected into ranges and delegated to a chunk reader on the base
+   * revision of the branch, then merged back into the chunks.
+   *
+   * @param chunkReader
+   *          the chunk reader holding revision and accessor context
+   * @param chunks
+   *          the chunks (index ranges) to fill; modified in place
+   * @param where
+   *          an optional additional SQL condition, may be <code>null</code>
+   */
+  public final void readChunks(IDBStoreChunkReader chunkReader, List<Chunk> chunks, String where)
+  {
+    CDORevision revision = chunkReader.getRevision();
+    if (TRACER.isEnabled())
+    {
+      TRACER.format("Reading list chunk values for feature {0}.{1} of {2}", getContainingClass().getName(), //$NON-NLS-1$
+          getFeature().getName(), revision);
+    }
+
+    IPreparedStatementCache statementCache = chunkReader.getAccessor().getStatementCache();
+    PreparedStatement pstmt = null;
+    ResultSet resultSet = null;
+
+    IStoreChunkReader baseReader = null;
+    try
+    {
+      StringBuilder builder = new StringBuilder(sqlSelectChunksPrefix);
+      if (where != null)
+      {
+        builder.append(" AND "); //$NON-NLS-1$
+        builder.append(where);
+      }
+
+      builder.append(sqlOrderByIndex);
+
+      String sql = builder.toString();
+      pstmt = statementCache.getPreparedStatement(sql, ReuseProbability.LOW);
+      pstmt.setLong(1, CDOIDUtil.getLong(revision.getID()));
+      pstmt.setInt(2, revision.getBranch().getID());
+      // version is bound twice: once against cdo_version_added, once against cdo_version_removed
+      pstmt.setInt(3, revision.getVersion());
+      pstmt.setInt(4, revision.getVersion());
+
+      resultSet = pstmt.executeQuery();
+
+      int nextDBIndex = Integer.MAX_VALUE; // next available DB index
+      if (resultSet.next())
+      {
+        nextDBIndex = resultSet.getInt(1);
+      }
+
+      for (Chunk chunk : chunks)
+      {
+        int startIndex = chunk.getStartIndex();
+        int missingValueStartIndex = -1;
+
+        for (int i = 0; i < chunk.size(); i++)
+        {
+          int nextListIndex = startIndex + i; // next expected list index
+
+          if (nextDBIndex == nextListIndex)
+          {
+            // DB value is available. check first if missing indexes were present before.
+            if (missingValueStartIndex != -1)
+            {
+              // read missing indexes from missingValueStartIndex to currentIndex
+              if (baseReader == null)
+              {
+                baseReader = createBaseChunkReader(chunkReader.getAccessor(), chunkReader.getRevision().getID(),
+                    chunkReader.getRevision().getBranch().getID());
+              }
+              if (TRACER.isEnabled())
+              {
+                TRACER.format(
+                    "Scheduling range {0}-{1} to be read from base revision", missingValueStartIndex, nextListIndex); //$NON-NLS-1$
+              }
+
+              baseReader.addRangedChunk(missingValueStartIndex, nextListIndex);
+
+              // reset missingValueStartIndex
+              missingValueStartIndex = -1;
+            }
+
+            // now read value and set to chunk
+            Long tag = resultSet.getLong(2);
+            Object value = getTypeMapping(tag).readValue(resultSet);
+            if (TRACER.isEnabled())
+            {
+              TRACER.format("ChunkReader read value for index {0} from result set: {1}", nextDBIndex, value); //$NON-NLS-1$
+            }
+            chunk.add(i, CDORevisionUtil.createFeatureMapEntry(getFeatureByTag(tag), value));
+
+            // advance DB cursor and read next available index
+            if (resultSet.next())
+            {
+              nextDBIndex = resultSet.getInt(1);
+            }
+            else
+            {
+              // no more DB indexes available, but we have to continue checking for gaps, therefore set to MAX_VALUE
+              nextDBIndex = Integer.MAX_VALUE;
+            }
+          }
+          else
+          {
+            // gap between next DB index and next list index detected.
+            // skip until end of chunk or until DB value becomes available
+            if (missingValueStartIndex == -1)
+            {
+              missingValueStartIndex = nextListIndex;
+            }
+          }
+        }
+
+        // chunk complete. check for missing values at the end of the chunk.
+        if (missingValueStartIndex != -1)
+        {
+          // read missing indexes from missingValueStartIndex to last chunk index
+          if (baseReader == null)
+          {
+            baseReader = createBaseChunkReader(chunkReader.getAccessor(), chunkReader.getRevision().getID(),
+                chunkReader.getRevision().getBranch().getID());
+          }
+          baseReader.addRangedChunk(missingValueStartIndex, chunk.getStartIndex() + chunk.size());
+        }
+      }
+    }
+    catch (SQLException ex)
+    {
+      throw new DBException(ex);
+    }
+    finally
+    {
+      DBUtil.close(resultSet);
+      statementCache.releasePreparedStatement(pstmt);
+    }
+
+    // now read missing values from base revision.
+    if (baseReader != null)
+    {
+      List<Chunk> baseChunks = baseReader.executeRead();
+
+      Iterator<Chunk> thisIterator = chunks.iterator();
+      Chunk thisChunk = thisIterator.next();
+
+      for (Chunk baseChunk : baseChunks)
+      {
+        int baseStartIndex = baseChunk.getStartIndex();
+
+        while (baseStartIndex > thisChunk.getStartIndex() + thisChunk.size())
+        {
+          // advance thisChunk, because it does not match baseChunk
+          thisChunk = thisIterator.next();
+        }
+
+        // baseChunk now corresponds to this chunk, but startIndex of baseChunk may be higher.
+        // therefore calculate the chunk-relative offset of the first base value.
+        // Bug fix: the offset must be baseStartIndex - thisChunk.getStartIndex() (it was reversed
+        // before, yielding a non-positive offset and writing values to the wrong positions).
+        int offset = baseStartIndex - thisChunk.getStartIndex();
+
+        // and copy values.
+        for (int i = 0; i < baseChunk.size(); i++)
+        {
+          thisChunk.add(i + offset, baseChunk.get(i));
+        }
+      } // finally, continue with the next baseChunk
+
+    }
+
+    if (TRACER.isEnabled())
+    {
+      TRACER.format("Reading list chunk values done for feature {0}.{1} of {2}", getContainingClass().getName(), //$NON-NLS-1$
+          getTagByFeature(getFeature()), revision);
+    }
+  }
+
+  /**
+   * Writes all feature-map values of the given (new) revision to the database,
+   * one row per list index, delegating to {@code writeValue} for each element.
+   *
+   * @param accessor
+   *          the accessor to write through
+   * @param revision
+   *          the revision whose list values are persisted
+   */
+  public void writeValues(IDBStoreAccessor accessor, InternalCDORevision revision)
+  {
+    CDOList values = revision.getList(getFeature());
+
+    int idx = 0;
+    for (Object element : values)
+    {
+      writeValue(accessor, revision, idx++, element);
+    }
+
+    if (TRACER.isEnabled())
+    {
+      TRACER.format("Writing done"); //$NON-NLS-1$
+    }
+  }
+
+  /**
+   * Writes a single list value at the given index for the given revision,
+   * inserting a row keyed by id/branch/version/index.
+   *
+   * @param accessor
+   *          the accessor to write through
+   * @param revision
+   *          the owning revision (supplies id, branch and version)
+   * @param idx
+   *          the list index of the value
+   * @param value
+   *          the feature-map entry to store
+   */
+  protected final void writeValue(IDBStoreAccessor accessor, CDORevision revision, int idx, Object value)
+  {
+    if (TRACER.isEnabled())
+    {
+      TRACER
+          .format(
+              "Writing value for feature {0}.{1} index {2} of {3} : {4}", getContainingClass().getName(), getTagByFeature(getFeature()), idx, revision, value); //$NON-NLS-1$
+    }
+
+    addEntry(accessor, revision.getID(), revision.getBranch().getID(), revision.getVersion(), idx, value);
+  }
+
+  /**
+   * Get column name (lazy). Registers the feature via {@code addFeature} on first use.
+   *
+   * @param tag
+   *          The feature's MetaID in CDO
+   * @return the column name where the values are stored
+   */
+  protected String getColumnName(Long tag)
+  {
+    String column = tagMap.get(tag);
+    if (column == null)
+    {
+      // first encounter of this tag: create column + type mapping, then retry
+      addFeature(tag);
+      column = tagMap.get(tag);
+    }
+
+    return column;
+  }
+
+  /**
+   * Get type mapping (lazy). Registers the feature via {@code addFeature} on first use.
+   *
+   * @param tag
+   *          The feature's MetaID in CDO
+   * @return the corresponding type mapping
+   */
+  protected ITypeMapping getTypeMapping(Long tag)
+  {
+    ITypeMapping typeMapping = typeMappings.get(tag);
+    if (typeMapping == null)
+    {
+      // first encounter of this tag: create column + type mapping, then retry
+      addFeature(tag);
+      typeMapping = typeMappings.get(tag);
+    }
+
+    return typeMapping;
+  }
+
+  /**
+   * Resolve the model feature identified by a CDO meta tag.
+   *
+   * @param tag
+   *          the feature's MetaID in CDO
+   * @return the {@link EStructuralFeature} registered under the given tag
+   */
+  private EStructuralFeature getFeatureByTag(Long tag)
+  {
+    return (EStructuralFeature)getMappingStrategy().getStore().getMetaDataManager().getMetaInstance(tag);
+  }
+
+  /**
+   * Resolve the CDO meta tag of a model feature (inverse of {@code getFeatureByTag}).
+   *
+   * @param feature
+   *          The EStructuralFeature
+   * @return The feature's MetaID in CDO
+   */
+  protected Long getTagByFeature(EStructuralFeature feature)
+  {
+    return getMappingStrategy().getStore().getMetaDataManager().getMetaID(feature);
+  }
+
+  /**
+   * Clear a list of a given revision. Before closing all version ranges in this
+   * branch, every index that has no row in the current branch gets a historic
+   * entry copied from the base revision, so that older revisions of this branch
+   * remain readable.
+   *
+   * @param accessor
+   *          the accessor to use
+   * @param id
+   *          the id of the revision from which to remove all items
+   * @param branchId
+   *          the branch in which the list is cleared
+   * @param oldVersion
+   *          the version being superseded (kept for interface symmetry; not used here)
+   * @param newVersion
+   *          the version in which the list becomes empty
+   * @param lastIndex
+   *          the last valid index of the list before clearing
+   */
+  public void clearList(IDBStoreAccessor accessor, CDOID id, int branchId, int oldVersion, int newVersion, int lastIndex)
+  {
+    IPreparedStatementCache statementCache = accessor.getStatementCache();
+    PreparedStatement pstmtClear = null;
+
+    try
+    {
+      // check for each index if the value exists in the current branch
+      for (int i = 0; i <= lastIndex; i++)
+      {
+        if (getValue(accessor, id, branchId, i, false) == null)
+        {
+          // if not, add a historic entry for missing ones.
+          addHistoricEntry(accessor, id, branchId, 0, newVersion, i, getValueFromBase(accessor, id, branchId, i));
+        }
+      }
+
+      // clear rest of the list: close the version range of all remaining rows
+      pstmtClear = statementCache.getPreparedStatement(sqlClearList, ReuseProbability.HIGH);
+      pstmtClear.setInt(1, newVersion);
+      pstmtClear.setLong(2, CDOIDUtil.getLong(id));
+      pstmtClear.setInt(3, branchId);
+
+      int result = CDODBUtil.sqlUpdate(pstmtClear, false);
+      if (TRACER.isEnabled())
+      {
+        TRACER.format("ClearList result: {0}", result); //$NON-NLS-1$
+      }
+    }
+    catch (SQLException e)
+    {
+      throw new DBException(e);
+    }
+    finally
+    {
+      statementCache.releasePreparedStatement(pstmtClear);
+    }
+  }
+
+  /**
+   * Handles detachment of an object: closes all list entries of the detached
+   * revision by clearing the list up to FINAL_VERSION in the transaction's branch.
+   *
+   * @param accessor
+   *          the accessor of the current transaction
+   * @param id
+   *          the id of the detached object
+   * @param revised
+   *          the detachment time stamp (not needed here; ranges are version-based)
+   */
+  public void objectDetached(IDBStoreAccessor accessor, CDOID id, long revised)
+  {
+    InternalCDORevision revision = (InternalCDORevision)accessor.getTransaction().getRevision(id);
+    int branchId = accessor.getTransaction().getBranch().getID();
+
+    if (TRACER.isEnabled())
+    {
+      // fixed MessageFormat index: only one argument is passed, so it must be {0}
+      TRACER.format("objectDetached {0}", revision); //$NON-NLS-1$
+    }
+
+    clearList(accessor, id, branchId, revision.getVersion(), FINAL_VERSION, revision.getList(getFeature()).size() - 1);
+  }
+
+  /**
+   * Applies a list feature delta to the persistent state by visiting each
+   * contained change with a {@code ListDeltaVisitor}. Deltas preceding the last
+   * clear/unset are skipped, since the clear cascades over them anyway.
+   *
+   * @param accessor
+   *          the accessor of the current transaction
+   * @param id
+   *          the id of the revision being changed
+   * @param branchId
+   *          the branch in which the change happens
+   * @param oldVersion
+   *          the version being superseded
+   * @param newVersion
+   *          the version being created
+   * @param created
+   *          the creation time stamp of the new version (unused; ranges are version-based)
+   * @param delta
+   *          the list delta to process
+   */
+  public void processDelta(final IDBStoreAccessor accessor, final CDOID id, final int branchId, int oldVersion,
+      final int newVersion, long created, CDOListFeatureDelta delta)
+  {
+    List<CDOFeatureDelta> listChanges = delta.getListChanges();
+    if (listChanges.size() == 0)
+    {
+      // nothing to do.
+      return;
+    }
+
+    InternalCDORevision originalRevision = (InternalCDORevision)accessor.getTransaction().getRevision(id);
+    int oldListSize = originalRevision.getList(getFeature()).size();
+
+    if (TRACER.isEnabled())
+    {
+      TRACER.format("ListTableMapping.processDelta for revision {0} - previous list size: {1}", originalRevision, //$NON-NLS-1$
+          oldListSize);
+    }
+
+    // let the visitor collect the changes
+    ListDeltaVisitor visitor = new ListDeltaVisitor(accessor, originalRevision, branchId, oldVersion, newVersion);
+
+    if (TRACER.isEnabled())
+    {
+      TRACER.format("Processing deltas..."); //$NON-NLS-1$
+    }
+
+    // optimization: it's only necessary to process deltas
+    // starting with the last feature delta which clears the list
+    // (any operation before the clear is cascaded by it anyway)
+    int index = listChanges.size() - 1;
+    while (index > 0)
+    {
+      CDOFeatureDelta listDelta = listChanges.get(index);
+      if (listDelta instanceof CDOClearFeatureDelta || listDelta instanceof CDOUnsetFeatureDelta)
+      {
+        break;
+      }
+      index--;
+    }
+    // replay the remaining deltas in order
+    while (index < listChanges.size())
+    {
+      listChanges.get(index++).accept(visitor);
+    }
+  }
+
+  /**
+   * Visitor that translates CDO list feature deltas into range-based row
+   * operations (add, remove, move, set, clear) against the list table,
+   * maintaining cdo_version_added/cdo_version_removed ranges per branch.
+   */
+  private class ListDeltaVisitor implements CDOFeatureDeltaVisitor
+  {
+    private IDBStoreAccessor accessor;
+
+    private InternalCDORevision originalRevision;
+
+    private CDOID id;
+
+    private int branchID;
+
+    private int oldVersion;
+
+    private int newVersion;
+
+    // last valid list index; updated as adds/removes shift the list
+    private int lastIndex;
+
+    public ListDeltaVisitor(IDBStoreAccessor accessor, InternalCDORevision originalRevision, int targetBranchID,
+        int oldVersion, int newVersion)
+    {
+      this.accessor = accessor;
+      this.originalRevision = originalRevision;
+      id = this.originalRevision.getID();
+      branchID = targetBranchID;
+      this.oldVersion = oldVersion;
+      this.newVersion = newVersion;
+      lastIndex = originalRevision.getList(getFeature()).size() - 1;
+    }
+
+    public void visit(CDOMoveFeatureDelta delta)
+    {
+      int fromIdx = delta.getOldPosition();
+      int toIdx = delta.getNewPosition();
+
+      if (TRACER.isEnabled())
+      {
+        TRACER.format("Delta Moving: {0} to {1}", fromIdx, toIdx); //$NON-NLS-1$
+      }
+
+      Object value = getValue(accessor, id, branchID, fromIdx, true);
+
+      // remove the item
+      removeEntry(accessor, id, branchID, oldVersion, newVersion, fromIdx);
+
+      // adjust indexes and shift either up or down
+      if (fromIdx < toIdx)
+      {
+        moveOneUp(accessor, id, branchID, oldVersion, newVersion, fromIdx + 1, toIdx);
+      }
+      else
+      { // fromIdx > toIdx here
+        moveOneDown(accessor, id, branchID, oldVersion, newVersion, toIdx, fromIdx - 1);
+      }
+
+      // create the item
+      addEntry(accessor, id, branchID, newVersion, toIdx, value);
+    }
+
+    public void visit(CDOAddFeatureDelta delta)
+    {
+      int startIndex = delta.getIndex();
+      int endIndex = lastIndex;
+
+      if (TRACER.isEnabled())
+      {
+        TRACER.format("Delta Adding at: {0}", startIndex); //$NON-NLS-1$
+      }
+
+      if (startIndex <= endIndex)
+      {
+        // make room for the new item
+        moveOneDown(accessor, id, branchID, oldVersion, newVersion, startIndex, endIndex);
+      }
+
+      // create the item
+      addEntry(accessor, id, branchID, newVersion, startIndex, delta.getValue());
+
+      ++lastIndex;
+    }
+
+    public void visit(CDORemoveFeatureDelta delta)
+    {
+      int startIndex = delta.getIndex();
+      int endIndex = lastIndex;
+
+      if (TRACER.isEnabled())
+      {
+        TRACER.format("Delta Removing at: {0}", startIndex); //$NON-NLS-1$
+      }
+
+      // remove the item
+      removeEntry(accessor, id, branchID, oldVersion, newVersion, startIndex);
+
+      // make room for the new item
+      moveOneUp(accessor, id, branchID, oldVersion, newVersion, startIndex + 1, endIndex);
+
+      --lastIndex;
+    }
+
+    public void visit(CDOSetFeatureDelta delta)
+    {
+      int index = delta.getIndex();
+
+      if (TRACER.isEnabled())
+      {
+        TRACER.format("Delta Setting at: {0}", index); //$NON-NLS-1$
+      }
+
+      // remove the item
+      removeEntry(accessor, id, branchID, oldVersion, newVersion, index);
+
+      // create the item
+      addEntry(accessor, id, branchID, newVersion, index, delta.getValue());
+    }
+
+    public void visit(CDOUnsetFeatureDelta delta)
+    {
+      if (delta.getFeature().isUnsettable())
+      {
+        throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+      }
+
+      if (TRACER.isEnabled())
+      {
+        TRACER.format("Delta Unsetting"); //$NON-NLS-1$
+      }
+
+      clearList(accessor, id, branchID, oldVersion, newVersion, lastIndex);
+      lastIndex = -1;
+    }
+
+    public void visit(CDOListFeatureDelta delta)
+    {
+      throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+    }
+
+    public void visit(CDOClearFeatureDelta delta)
+    {
+      if (TRACER.isEnabled())
+      {
+        TRACER.format("Delta Clearing"); //$NON-NLS-1$
+      }
+
+      clearList(accessor, id, branchID, oldVersion, newVersion, lastIndex);
+      lastIndex = -1;
+    }
+
+    public void visit(CDOContainerFeatureDelta delta)
+    {
+      throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+    }
+
+    /**
+     * Shifts the entries in [startIndex, endIndex] one position towards the list
+     * head. Entries created in the current version are updated in place; entries
+     * inherited from older versions or from the base branch are closed/copied first.
+     */
+    private void moveOneUp(IDBStoreAccessor accessor, CDOID id, int branchId, int oldVersion, int newVersion,
+        int startIndex, int endIndex)
+    {
+      IPreparedStatementCache statementCache = accessor.getStatementCache();
+      PreparedStatement pstmt = null;
+
+      try
+      {
+        pstmt = statementCache.getPreparedStatement(sqlUpdateIndex, ReuseProbability.HIGH);
+
+        for (int index = startIndex; index <= endIndex; ++index)
+        {
+          if (TRACER.isEnabled())
+          {
+            TRACER.format("moveOneUp moving: {0} -> {1}", index, index - 1); //$NON-NLS-1$
+          }
+
+          int stmtIndex = 1;
+          pstmt.setInt(stmtIndex++, index - 1);
+          pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+          pstmt.setInt(stmtIndex++, branchId);
+          pstmt.setInt(stmtIndex++, newVersion);
+          pstmt.setInt(stmtIndex++, index);
+
+          int result = CDODBUtil.sqlUpdate(pstmt, false);
+          switch (result)
+          {
+          case 1:
+            // entry for current revision was already present.
+            // index update succeeded.
+            if (TRACER.isEnabled())
+            {
+              TRACER.format("moveOneUp updated: {0} -> {1}", index, index - 1); //$NON-NLS-1$
+            }
+
+            break;
+          case 0:
+            Object value = getValue(accessor, id, branchId, index, false);
+
+            if (value != null)
+            {
+              if (TRACER.isEnabled())
+              {
+                TRACER.format("moveOneUp remove: {0}", index); //$NON-NLS-1$
+              }
+
+              removeEntry(accessor, id, branchId, oldVersion, newVersion, index);
+            }
+            else
+            {
+              value = getValueFromBase(accessor, id, branchId, index);
+              // bug fix: trace call was wrapped in a bare block and executed
+              // unconditionally; it must be guarded like all other trace sites
+              if (TRACER.isEnabled())
+              {
+                TRACER.format("moveOneUp add historic entry at: {0}", index); //$NON-NLS-1$
+              }
+
+              addHistoricEntry(accessor, id, branchId, 0, newVersion, index, value);
+            }
+
+            if (TRACER.isEnabled())
+            {
+              TRACER.format("moveOneUp add: {0}", index - 1); //$NON-NLS-1$
+            }
+
+            addEntry(accessor, id, branchId, newVersion, index - 1, value);
+            break;
+          default:
+            if (TRACER.isEnabled())
+            {
+              // bug fix: the move target in this trace is index - 1, not index + 1
+              TRACER.format("moveOneUp Too many results: {0} -> {1}: {2}", index, index - 1, result); //$NON-NLS-1$
+            }
+
+            throw new DBException("Too many results"); //$NON-NLS-1$
+          }
+        }
+      }
+      catch (SQLException e)
+      {
+        throw new DBException(e);
+      }
+      finally
+      {
+        statementCache.releasePreparedStatement(pstmt);
+      }
+    }
+
+    /**
+     * Shifts the entries in [startIndex, endIndex] one position towards the list
+     * tail (iterating backwards to avoid collisions). Same inheritance handling
+     * as {@code moveOneUp}.
+     */
+    private void moveOneDown(IDBStoreAccessor accessor, CDOID id, int branchId, int oldVersion, int newVersion,
+        int startIndex, int endIndex)
+    {
+      IPreparedStatementCache statementCache = accessor.getStatementCache();
+      PreparedStatement pstmt = null;
+
+      try
+      {
+        pstmt = statementCache.getPreparedStatement(sqlUpdateIndex, ReuseProbability.HIGH);
+        for (int index = endIndex; index >= startIndex; --index)
+        {
+          if (TRACER.isEnabled())
+          {
+            TRACER.format("moveOneDown moving: {0} -> {1}", index, index + 1); //$NON-NLS-1$
+          }
+
+          int stmtIndex = 1;
+          pstmt.setInt(stmtIndex++, index + 1);
+          pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+          pstmt.setInt(stmtIndex++, branchId);
+          pstmt.setInt(stmtIndex++, newVersion);
+          pstmt.setInt(stmtIndex++, index);
+
+          int result = CDODBUtil.sqlUpdate(pstmt, false);
+          switch (result)
+          {
+          case 1:
+            // entry for current revision was already present.
+            // index update succeeded.
+            if (TRACER.isEnabled())
+            {
+              TRACER.format("moveOneDown updated: {0} -> {1}", index, index + 1); //$NON-NLS-1$
+            }
+
+            break;
+          case 0:
+            Object value = getValue(accessor, id, branchId, index, false);
+            if (value != null)
+            {
+              if (TRACER.isEnabled())
+              {
+                TRACER.format("moveOneDown remove: {0}", index); //$NON-NLS-1$
+              }
+
+              removeEntry(accessor, id, branchId, oldVersion, newVersion, index);
+            }
+            else
+            {
+              value = getValueFromBase(accessor, id, branchId, index);
+              // bug fix: trace call was wrapped in a bare block and executed
+              // unconditionally; it must be guarded like all other trace sites
+              if (TRACER.isEnabled())
+              {
+                TRACER.format("moveOneDown add historic entry at: {0}", index); //$NON-NLS-1$
+              }
+
+              addHistoricEntry(accessor, id, branchId, 0, newVersion, index, value);
+            }
+
+            addEntry(accessor, id, branchId, newVersion, index + 1, value);
+            break;
+          default:
+            if (TRACER.isEnabled())
+            {
+              TRACER.format("moveOneDown Too many results: {0} -> {1}: {2}", index, index + 1, result); //$NON-NLS-1$
+            }
+
+            throw new DBException("Too many results"); //$NON-NLS-1$
+          }
+        }
+      }
+      catch (SQLException e)
+      {
+        throw new DBException(e);
+      }
+      finally
+      {
+        statementCache.releasePreparedStatement(pstmt);
+      }
+    }
+  }
+
+  /**
+   * Inserts a new list row for the current version with an open version range
+   * (cdo_version_removed is NULL). The value is stored in the column matching
+   * the entry feature's DB type; all other value columns are set to NULL.
+   *
+   * @param accessor
+   *          the accessor to write through
+   * @param id
+   *          the id of the owning revision
+   * @param branchId
+   *          the branch of the entry
+   * @param version
+   *          the version in which the entry becomes valid
+   * @param index
+   *          the list index of the entry
+   * @param value
+   *          the {@link FeatureMap.Entry} to store
+   */
+  private void addEntry(IDBStoreAccessor accessor, CDOID id, int branchId, int version, int index, Object value)
+  {
+    IPreparedStatementCache statementCache = accessor.getStatementCache();
+    PreparedStatement pstmt = null;
+
+    if (TRACER.isEnabled())
+    {
+      TRACER.format("Adding value for feature() {0}.{1} index {2} of {3}v{4} : {5}", //$NON-NLS-1$
+          getContainingClass().getName(), getFeature().getName(), index, id, version, value);
+    }
+
+    try
+    {
+      FeatureMap.Entry entry = (FeatureMap.Entry)value;
+      EStructuralFeature entryFeature = entry.getEStructuralFeature();
+      Long tag = getTagByFeature(entryFeature);
+      String column = getColumnName(tag);
+
+      pstmt = statementCache.getPreparedStatement(sqlInsert, ReuseProbability.HIGH);
+
+      int stmtIndex = 1;
+      pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+      pstmt.setInt(stmtIndex++, branchId);
+      pstmt.setInt(stmtIndex++, version);
+      pstmt.setNull(stmtIndex++, DBType.INTEGER.getCode()); // versionRemoved
+      pstmt.setInt(stmtIndex++, index);
+
+      // bind the value to its matching type column, NULL to all others
+      for (int i = 0; i < columnNames.size(); i++)
+      {
+        if (columnNames.get(i).equals(column))
+        {
+          getTypeMapping(tag).setValue(pstmt, stmtIndex++, entry.getValue());
+        }
+        else
+        {
+          pstmt.setNull(stmtIndex++, getDBTypes().get(i).getCode());
+        }
+      }
+
+      pstmt.setInt(stmtIndex++, index);
+      pstmt.setLong(stmtIndex++, tag);
+      CDODBUtil.sqlUpdate(pstmt, true);
+    }
+    catch (SQLException e)
+    {
+      throw new DBException(e);
+    }
+    catch (IllegalStateException e)
+    {
+      throw new DBException(e);
+    }
+    finally
+    {
+      statementCache.releasePreparedStatement(pstmt);
+    }
+  }
+
+  /**
+   * Inserts a list row with an already-closed version range, used to copy a
+   * value inherited from the base branch into this branch before it is changed
+   * or removed, so that intermediate versions of this branch stay readable.
+   *
+   * @param accessor
+   *          the accessor to write through
+   * @param id
+   *          the id of the owning revision
+   * @param branchId
+   *          the branch of the entry
+   * @param versionAdded
+   *          the first version in which the entry is valid (0 for inherited values)
+   * @param versionRemoved
+   *          the version in which the entry stops being valid
+   * @param index
+   *          the list index of the entry
+   * @param value
+   *          the {@link FeatureMap.Entry} to store
+   */
+  private void addHistoricEntry(IDBStoreAccessor accessor, CDOID id, int branchId, int versionAdded,
+      int versionRemoved, int index, Object value)
+  {
+    IPreparedStatementCache statementCache = accessor.getStatementCache();
+    PreparedStatement pstmt = null;
+
+    if (TRACER.isEnabled())
+    {
+      TRACER.format(
+          "Adding historic value for feature {0}.{1} index {2} of {3}:{4}v{5}-v{6} : {7}", //$NON-NLS-1$
+          getContainingClass().getName(), getFeature().getName(), index, id, branchId, versionAdded, versionRemoved,
+          value);
+    }
+
+    try
+    {
+      FeatureMap.Entry entry = (FeatureMap.Entry)value;
+      EStructuralFeature entryFeature = entry.getEStructuralFeature();
+      Long tag = getTagByFeature(entryFeature);
+      String column = getColumnName(tag);
+
+      pstmt = statementCache.getPreparedStatement(sqlInsert, ReuseProbability.HIGH);
+
+      int stmtIndex = 1;
+      pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+      pstmt.setInt(stmtIndex++, branchId);
+      pstmt.setInt(stmtIndex++, versionAdded);
+      // bug fix: a historic entry must record the closing version; the previous
+      // code called setNull(stmtIndex++, versionRemoved), misusing the value as a
+      // java.sql.Types code and storing NULL (i.e. an open range) instead
+      pstmt.setInt(stmtIndex++, versionRemoved);
+      pstmt.setInt(stmtIndex++, index);
+
+      // bind the value to its matching type column, NULL to all others
+      for (int i = 0; i < columnNames.size(); i++)
+      {
+        if (columnNames.get(i).equals(column))
+        {
+          getTypeMapping(tag).setValue(pstmt, stmtIndex++, entry.getValue());
+        }
+        else
+        {
+          pstmt.setNull(stmtIndex++, getDBTypes().get(i).getCode());
+        }
+      }
+
+      pstmt.setInt(stmtIndex++, index);
+      pstmt.setLong(stmtIndex++, tag);
+      CDODBUtil.sqlUpdate(pstmt, true);
+    }
+    catch (SQLException e)
+    {
+      throw new DBException(e);
+    }
+    catch (IllegalStateException e)
+    {
+      throw new DBException(e);
+    }
+    finally
+    {
+      statementCache.releasePreparedStatement(pstmt);
+    }
+  }
+
+  /**
+   * Removes a list entry at the given index for the new version. Three cases:
+   * (1) the entry was created in this very version -> physically delete the row;
+   * (2) the entry exists in this branch with an older version -> close its range;
+   * (3) the entry is inherited from the base branch -> copy it as a historic
+   * entry that is immediately marked removed.
+   *
+   * @param accessor
+   *          the accessor to write through
+   * @param id
+   *          the id of the owning revision
+   * @param branchId
+   *          the branch of the entry
+   * @param oldVersion
+   *          the version being superseded
+   * @param newVersion
+   *          the version in which the entry disappears
+   * @param index
+   *          the list index of the entry to remove
+   */
+  private void removeEntry(IDBStoreAccessor accessor, CDOID id, int branchId, int oldVersion, int newVersion, int index)
+  {
+    IPreparedStatementCache statementCache = accessor.getStatementCache();
+    PreparedStatement pstmt = null;
+
+    if (TRACER.isEnabled())
+    {
+      TRACER.format("Removing value for feature() {0}.{1} index {2} of {3}v{4}", //$NON-NLS-1$
+          getContainingClass().getName(), getFeature().getName(), index, id, newVersion);
+    }
+
+    try
+    {
+      // try to delete a temporary entry first
+      pstmt = statementCache.getPreparedStatement(sqlDeleteEntry, ReuseProbability.HIGH);
+
+      int stmtIndex = 1;
+      pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+      pstmt.setInt(stmtIndex++, branchId);
+      pstmt.setInt(stmtIndex++, index);
+      pstmt.setInt(stmtIndex++, newVersion);
+
+      int result = CDODBUtil.sqlUpdate(pstmt, false);
+      if (result == 1)
+      {
+        // case (1): entry was created within this version and is simply deleted
+        if (TRACER.isEnabled())
+        {
+          TRACER.format("removeEntry deleted: {0}", index); //$NON-NLS-1$
+        }
+      }
+      else if (result > 1)
+      {
+        if (TRACER.isEnabled())
+        {
+          TRACER.format("removeEntry Too many results: {0}: {1}", index, result); //$NON-NLS-1$
+        }
+
+        throw new DBException("Too many results"); //$NON-NLS-1$
+      }
+      else
+      {
+        // no temporary entry found, so mark the entry as removed (case (2))
+        statementCache.releasePreparedStatement(pstmt);
+        pstmt = statementCache.getPreparedStatement(sqlRemoveEntry, ReuseProbability.HIGH);
+
+        stmtIndex = 1;
+        pstmt.setInt(stmtIndex++, newVersion);
+        pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+        pstmt.setInt(stmtIndex++, branchId);
+        pstmt.setInt(stmtIndex++, index);
+        result = CDODBUtil.sqlUpdate(pstmt, false);
+
+        if (result == 0)
+        {
+          // no entry removed -> this means that we are in a branch and
+          // the entry has not been modified since the branch fork.
+          // therefore, we have to copy the base value and mark it as removed (case (3))
+          Object value = getValueFromBase(accessor, id, branchId, index);
+          addHistoricEntry(accessor, id, branchId, 0, newVersion, index, value);
+        }
+      }
+    }
+    catch (SQLException e)
+    {
+      if (TRACER.isEnabled())
+      {
+        TRACER.format("Removing value for feature() {0}.{1} index {2} of {3}v{4} FAILED {5}", //$NON-NLS-1$
+            getContainingClass().getName(), getFeature().getName(), index, id, newVersion, e.getMessage());
+      }
+
+      throw new DBException(e);
+    }
+    catch (IllegalStateException e)
+    {
+      if (TRACER.isEnabled())
+      {
+        TRACER.format("Removing value for feature() {0}.{1} index {2} of {3}v{4} FAILED {5}", //$NON-NLS-1$
+            getContainingClass().getName(), getFeature().getName(), index, id, newVersion, e.getMessage());
+      }
+
+      throw new DBException(e);
+    }
+    finally
+    {
+      statementCache.releasePreparedStatement(pstmt);
+    }
+  }
+
+  /**
+   * Reads the current value at the given index in the given branch.
+   *
+   * @param accessor
+   *          the accessor to read through
+   * @param id
+   *          the id of the owning revision
+   * @param branchId
+   *          the branch to read from
+   * @param index
+   *          the list index
+   * @param getFromBase
+   *          if <code>true</code>, fall back to the base revision when no row
+   *          exists in this branch; if <code>false</code>, return
+   *          <code>null</code> in that case
+   * @return the entry, or <code>null</code> if absent in this branch and
+   *         <code>getFromBase</code> is <code>false</code>
+   */
+  private FeatureMap.Entry getValue(IDBStoreAccessor accessor, CDOID id, int branchId, int index, boolean getFromBase)
+  {
+    IPreparedStatementCache statementCache = accessor.getStatementCache();
+    PreparedStatement pstmt = null;
+    FeatureMap.Entry result = null;
+
+    try
+    {
+      pstmt = statementCache.getPreparedStatement(sqlGetValue, ReuseProbability.HIGH);
+
+      int stmtIndex = 1;
+      pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+      pstmt.setInt(stmtIndex++, branchId);
+      pstmt.setInt(stmtIndex++, index);
+
+      ResultSet resultSet = pstmt.executeQuery();
+      if (resultSet.next())
+      {
+        Long tag = resultSet.getLong(1);
+        Object value = getTypeMapping(tag).readValue(resultSet);
+        result = CDORevisionUtil.createFeatureMapEntry(getFeatureByTag(tag), value);
+      }
+      else
+      {
+        // value is not in this branch.
+        // -> read from base revision
+        if (getFromBase)
+        {
+          result = getValueFromBase(accessor, id, branchId, index);
+        } // else: result remains null
+      }
+      if (TRACER.isEnabled())
+      {
+        TRACER.format("Read value (index {0}) from result set: {1}", index, result); //$NON-NLS-1$
+      }
+    }
+    catch (SQLException e)
+    {
+      throw new DBException(e);
+    }
+    finally
+    {
+      statementCache.releasePreparedStatement(pstmt);
+    }
+
+    return result;
+  }
+
+  /**
+   * Read a single value (at a given index) from the base revision
+   *
+   * @param accessor
+   *          the DBStoreAccessor
+   * @param id
+   *          the ID of the revision
+   * @param branchID
+   *          the ID of the current (child) branch
+   * @param index
+   *          the index to read the value from
+   * @return the value which is at index <code>index</code> in revision with ID <code>id</code> in the parent branch at
+   *         the base of this branch (indicated by <code>branchID</code>).
+   */
+  private FeatureMap.Entry getValueFromBase(IDBStoreAccessor accessor, CDOID id, int branchID, int index)
+  {
+    IStoreChunkReader chunkReader = createBaseChunkReader(accessor, id, branchID);
+    chunkReader.addSimpleChunk(index);
+    List<Chunk> chunks = chunkReader.executeRead();
+    // exactly one chunk with one value was requested
+    // NOTE(review): assumes the base revision's list actually covers this index — verify against callers
+    return (FeatureMap.Entry)chunks.get(0).get(0);
+  }
+
+  /**
+   * Creates a chunk reader on the revision of the given object at the base
+   * (fork point) of the given branch, used to resolve values inherited from
+   * the parent branch.
+   *
+   * @param accessor
+   *          the DBStoreAccessor
+   * @param id
+   *          the ID of the revision
+   * @param branchID
+   *          the ID of the current (child) branch
+   * @return a chunk reader positioned on the base revision for this feature
+   */
+  private IStoreChunkReader createBaseChunkReader(IDBStoreAccessor accessor, CDOID id, int branchID)
+  {
+    CDOBranchPoint base = accessor.getStore().getRepository().getBranchManager().getBranch(branchID).getBase();
+    InternalCDORevision baseRevision = (InternalCDORevision)accessor.getStore().getRepository().getRevisionManager()
+        .getRevision(id, base, /* referenceChunk = */0, /* prefetchDepth = */CDORevision.DEPTH_NONE, true);
+    IStoreChunkReader chunkReader = accessor.createChunkReader(baseRevision, getFeature());
+    return chunkReader;
+  }
+
+  /**
+   * Not supported for feature maps.
+   *
+   * @throws ImplementationError
+   *           always
+   */
+  public final boolean queryXRefs(IDBStoreAccessor accessor, String mainTableName, String mainTableWhere,
+      QueryXRefsContext context, String idString)
+  {
+    // must never be called (a feature map is not associated with an EReference feature, so XRefs are not supported
+    // here)
+    throw new ImplementationError("Should never be called!");
+  }
+}
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/BranchingListTableMappingWithRanges.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/BranchingListTableMappingWithRanges.java new file mode 100644 index 0000000000..5dfe3b85e1 --- /dev/null +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/BranchingListTableMappingWithRanges.java @@ -0,0 +1,1394 @@ +/**
+ * Copyright (c) 2004 - 2010 Eike Stepper (Berlin, Germany) and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * This class has been derived from AbstractListTableMapping
+ *
+ * Contributors:
+ * Stefan Winkler - initial API and implementation taken from AuditListTableMappingWithRanges
+ * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy
+ */
+package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal;
+
+import org.eclipse.emf.cdo.common.branch.CDOBranchPoint;
+import org.eclipse.emf.cdo.common.id.CDOID;
+import org.eclipse.emf.cdo.common.id.CDOIDUtil;
+import org.eclipse.emf.cdo.common.revision.CDOList;
+import org.eclipse.emf.cdo.common.revision.CDORevision;
+import org.eclipse.emf.cdo.common.revision.delta.CDOAddFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOClearFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOContainerFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOFeatureDeltaVisitor;
+import org.eclipse.emf.cdo.common.revision.delta.CDOListFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOMoveFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDORemoveFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOSetFeatureDelta;
+import org.eclipse.emf.cdo.common.revision.delta.CDOUnsetFeatureDelta;
+import org.eclipse.emf.cdo.server.IStoreAccessor.QueryXRefsContext;
+import org.eclipse.emf.cdo.server.IStoreChunkReader;
+import org.eclipse.emf.cdo.server.IStoreChunkReader.Chunk;
+import org.eclipse.emf.cdo.server.db.CDODBUtil;
+import org.eclipse.emf.cdo.server.db.IDBStoreAccessor;
+import org.eclipse.emf.cdo.server.db.IDBStoreChunkReader;
+import org.eclipse.emf.cdo.server.db.IPreparedStatementCache;
+import org.eclipse.emf.cdo.server.db.IPreparedStatementCache.ReuseProbability;
+import org.eclipse.emf.cdo.server.db.mapping.IListMappingDeltaSupport;
+import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy;
+import org.eclipse.emf.cdo.server.db.mapping.ITypeMapping;
+import org.eclipse.emf.cdo.server.internal.db.CDODBSchema;
+import org.eclipse.emf.cdo.server.internal.db.bundle.OM;
+import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevision;
+
+import org.eclipse.net4j.db.DBException;
+import org.eclipse.net4j.db.DBType;
+import org.eclipse.net4j.db.DBUtil;
+import org.eclipse.net4j.db.ddl.IDBField;
+import org.eclipse.net4j.db.ddl.IDBIndex.Type;
+import org.eclipse.net4j.db.ddl.IDBTable;
+import org.eclipse.net4j.util.ImplementationError;
+import org.eclipse.net4j.util.collection.MoveableList;
+import org.eclipse.net4j.util.om.trace.ContextTracer;
+
+import org.eclipse.emf.ecore.EClass;
+import org.eclipse.emf.ecore.EReference;
+import org.eclipse.emf.ecore.EStructuralFeature;
+
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * This is a list-table mapping for branching mode. It is optimized for frequent insert operations at the list's end,
+ * which causes just 1 DB row to be changed. This is achieved by introducing a version range (columns cdo_version_added
+ * and cdo_version_removed) which records for which revisions a particular entry existed. Also, this mapping is mainly
+ * optimized for potentially very large lists: the need for having the complete list stored in memory to do
+ * in-the-middle moves and inserts is traded in for a few more DB access operations.
+ *
+ * @author Eike Stepper
+ * @author Stefan Winkler
+ * @author Lothar Werzinger
+ */
+public class BranchingListTableMappingWithRanges extends BasicAbstractListTableMapping implements
+ IListMappingDeltaSupport
+{
+  private static final ContextTracer TRACER = new ContextTracer(OM.DEBUG, BranchingListTableMappingWithRanges.class);
+
+  /**
+   * Used to clean up lists for detached objects.
+   */
+  private static final int FINAL_VERSION = Integer.MAX_VALUE;
+
+  /**
+   * The table of this mapping.
+   */
+  private IDBTable table;
+
+  /**
+   * The type mapping for the value field.
+   */
+  private ITypeMapping typeMapping;
+
+  // --------- SQL strings - see initSQLStrings() -----------------
+
+  // SELECT prefix for reading list values/chunks (completed per query)
+  private String sqlSelectChunksPrefix;
+
+  // ORDER BY clause appended to chunk queries
+  private String sqlOrderByIndex;
+
+  // INSERT of a new list row
+  private String sqlInsertEntry;
+
+  // physical DELETE of a row created within the current version
+  private String sqlDeleteEntry;
+
+  // UPDATE closing a row's version range (logical removal)
+  private String sqlRemoveEntry;
+
+  // UPDATE shifting a row's index during moves
+  private String sqlUpdateIndex;
+
+  // SELECT of a single value at a given index
+  private String sqlGetValue;
+
+  // UPDATE closing all open version ranges of a list
+  private String sqlClearList;
+
+ /**
+ * Creates the mapping for one list feature of one class: registers the backing DB table (including its fields and
+ * indexes) and precomputes all SQL statement strings used by this mapping.
+ *
+ * @param mappingStrategy
+ * the mapping strategy providing table names, DB schema and value type mappings
+ * @param eClass
+ * the class containing the mapped feature
+ * @param feature
+ * the list feature mapped by this instance
+ */
+ public BranchingListTableMappingWithRanges(IMappingStrategy mappingStrategy, EClass eClass, EStructuralFeature feature)
+ {
+ super(mappingStrategy, eClass, feature);
+ initTable();
+ initSQLStrings();
+ }
+
+ /**
+ * Registers the list table in the DB schema: one row per list entry, keyed by revision id, branch, version range
+ * (added/removed) and list index, plus the value column created by the type mapping. Each key column gets a
+ * non-unique index.
+ */
+ private void initTable()
+ {
+ IMappingStrategy strategy = getMappingStrategy();
+ String tableName = strategy.getTableName(getContainingClass(), getFeature());
+ table = strategy.getStore().getDBSchema().addTable(tableName);
+
+ IDBField[] indexedFields = { table.addField(CDODBSchema.LIST_REVISION_ID, DBType.BIGINT),
+ table.addField(CDODBSchema.LIST_REVISION_BRANCH, DBType.INTEGER),
+ table.addField(CDODBSchema.LIST_REVISION_VERSION_ADDED, DBType.INTEGER),
+ table.addField(CDODBSchema.LIST_REVISION_VERSION_REMOVED, DBType.INTEGER),
+ table.addField(CDODBSchema.LIST_IDX, DBType.INTEGER) };
+
+ // add field for value
+ typeMapping = strategy.createValueMapping(getFeature());
+ typeMapping.createDBField(table, CDODBSchema.LIST_VALUE);
+
+ // add table indexes
+ for (IDBField indexedField : indexedFields)
+ {
+ table.addIndex(Type.NON_UNIQUE, indexedField);
+ }
+ }
+
+ public Collection<IDBTable> getDBTables()
+ {
+ return Arrays.asList(table);
+ }
+
+ /**
+ * Precomputes the SQL statement strings used by this mapping. All statements select or manipulate rows of the
+ * single list table by revision id, branch and (where applicable) version range or list index.
+ * <p>
+ * NOTE(review): the first statement uses {@code getTable().getName()} while the later ones pass {@code getTable()}
+ * directly to {@code StringBuilder.append} (i.e. rely on its {@code toString()}) - presumably both yield the same
+ * table name; verify against the IDBTable implementation.
+ */
+ private void initSQLStrings()
+ {
+ String tableName = getTable().getName();
+
+ // ---------------- read chunks ----------------------------
+ // entries visible in version v: versionAdded <= v AND (versionRemoved IS NULL OR versionRemoved > v)
+ StringBuilder builder = new StringBuilder();
+ builder.append("SELECT "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append(", "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_VALUE);
+ builder.append(" FROM "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_BRANCH);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_ADDED);
+ builder.append("<=? AND ("); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" IS NULL OR "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(">?)"); //$NON-NLS-1$
+ sqlSelectChunksPrefix = builder.toString();
+
+ sqlOrderByIndex = " ORDER BY " + CDODBSchema.LIST_IDX; //$NON-NLS-1$
+
+ // ----------------- insert entry -----------------
+ // params: id, branch, versionAdded, versionRemoved, index, value
+ builder = new StringBuilder("INSERT INTO "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append("("); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append(","); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_BRANCH);
+ builder.append(","); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_ADDED);
+ builder.append(","); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(","); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append(","); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_VALUE);
+ builder.append(") VALUES (?, ?, ?, ?, ?, ?)"); //$NON-NLS-1$
+ sqlInsertEntry = builder.toString();
+
+ // ----------------- remove current entry -----------------
+ // sets versionRemoved on the live entry (versionRemoved IS NULL) at a given index
+ builder = new StringBuilder("UPDATE "); //$NON-NLS-1$
+ builder.append(getTable());
+ builder.append(" SET "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append("=? "); //$NON-NLS-1$
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_BRANCH);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlRemoveEntry = builder.toString();
+
+ // ----------------- delete temporary entry -----------------
+ // deletes an entry that was added in the version currently being written (matched by versionAdded)
+ builder = new StringBuilder("DELETE FROM "); //$NON-NLS-1$
+ builder.append(getTable());
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_BRANCH);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_ADDED);
+ builder.append("=?"); //$NON-NLS-1$
+ sqlDeleteEntry = builder.toString();
+
+ // ----------------- update index -----------------
+ // shifts the index of an entry added in the version currently being written
+ builder = new StringBuilder("UPDATE "); //$NON-NLS-1$
+ builder.append(getTable());
+ builder.append(" SET "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append("=? WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_BRANCH);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_ADDED);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append("=?"); //$NON-NLS-1$
+ sqlUpdateIndex = builder.toString();
+
+ // ----------------- get current value -----------------
+ // reads the live (versionRemoved IS NULL) value at a given index
+ builder = new StringBuilder("SELECT "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_VALUE);
+ builder.append(" FROM "); //$NON-NLS-1$
+ builder.append(getTable());
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_BRANCH);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlGetValue = builder.toString();
+
+ // ----------- clear list items -------------------------
+ // marks all live entries of the list as removed in one statement
+ builder = new StringBuilder("UPDATE "); //$NON-NLS-1$
+ builder.append(getTable());
+ builder.append(" SET "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append("=? "); //$NON-NLS-1$
+ builder.append(" WHERE "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_BRANCH);
+ builder.append("=? AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_VERSION_REMOVED);
+ builder.append(" IS NULL"); //$NON-NLS-1$
+ sqlClearList = builder.toString();
+ }
+
+ /**
+ * Returns the DB table backing this list mapping.
+ */
+ protected final IDBTable getTable()
+ {
+ return table;
+ }
+
+ /**
+ * Returns the type mapping used for the value column of the list table.
+ */
+ protected final ITypeMapping getTypeMapping()
+ {
+ return typeMapping;
+ }
+
+ public void readValues(IDBStoreAccessor accessor, InternalCDORevision revision, final int listChunk)
+ {
+ MoveableList<Object> list = revision.getList(getFeature());
+ int valuesToRead = list.size();
+ if (listChunk != CDORevision.UNCHUNKED && listChunk < valuesToRead)
+ {
+ valuesToRead = listChunk;
+ }
+
+ if (valuesToRead == 0)
+ {
+ // nothing to read take shortcut
+ return;
+ }
+
+ CDOID id = revision.getID();
+ int branchID = revision.getBranch().getID();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading list values for feature {0}.{1} of {2}", getContainingClass().getName(), //$NON-NLS-1$
+ getFeature().getName(), revision);
+ }
+
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+ ResultSet resultSet = null;
+
+ IStoreChunkReader baseReader = null;
+ try
+ {
+ String sql = sqlSelectChunksPrefix + sqlOrderByIndex;
+ pstmt = statementCache.getPreparedStatement(sql, ReuseProbability.HIGH);
+ pstmt.setLong(1, CDOIDUtil.getLong(id));
+ pstmt.setInt(2, branchID);
+ pstmt.setInt(3, revision.getVersion());
+ pstmt.setInt(4, revision.getVersion());
+ pstmt.setMaxRows(valuesToRead); // optimization - don't read unneeded rows.
+
+ resultSet = pstmt.executeQuery();
+
+ int currentIndex = 0;
+
+ while (valuesToRead > 0 && resultSet.next())
+ {
+ int index = resultSet.getInt(1);
+ if (index > currentIndex)
+ {
+ if (baseReader == null)
+ {
+ baseReader = createBaseChunkReader(accessor, id, branchID);
+ }
+
+ baseReader.addRangedChunk(currentIndex, index);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Scheduling range {0}-{1} to be read from base revision", currentIndex, index); //$NON-NLS-1$
+ }
+
+ valuesToRead -= index - currentIndex;
+ currentIndex = index;
+ }
+
+ Object value = typeMapping.readValue(resultSet);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Read value for index {0} from result set: {1}", currentIndex, value); //$NON-NLS-1$
+ }
+
+ list.set(currentIndex++, value);
+ valuesToRead--;
+ }
+
+ if (valuesToRead > 0)
+ {
+ if (baseReader == null)
+ {
+ baseReader = createBaseChunkReader(accessor, id, branchID);
+ }
+
+ baseReader.addRangedChunk(currentIndex, currentIndex + valuesToRead);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format(
+ "Scheduling range {0}-{1} to be read from base revision", currentIndex, currentIndex + valuesToRead); //$NON-NLS-1$
+ }
+ }
+ }
+ catch (SQLException ex)
+ {
+ throw new DBException(ex);
+ }
+ finally
+ {
+ DBUtil.close(resultSet);
+ statementCache.releasePreparedStatement(pstmt);
+ }
+
+ if (baseReader != null)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading base revision chunks for feature {0}.{1} of {2} from base revision {3}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), revision, baseReader.getRevision());
+ }
+
+ List<Chunk> baseChunks = baseReader.executeRead();
+ for (Chunk chunk : baseChunks)
+ {
+ int startIndex = chunk.getStartIndex();
+ for (int i = 0; i < chunk.size(); i++)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Copying value {0} at chunk index {1}+{2} to index {3}", //$NON-NLS-1$
+ chunk.get(i), startIndex, i, startIndex + i);
+ }
+
+ list.set(startIndex + i, chunk.get(i));
+ }
+ }
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading {3} list values done for feature {0}.{1} of {2}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), revision, list.size());
+ }
+ }
+
+ /**
+ * Reads the requested chunks of the revision's list. Values present in this branch are read from the list table;
+ * indexes missing in this branch (unchanged since the branch fork) are collected as ranges and fetched from the
+ * base revision via a second chunk reader, then copied into the corresponding chunks.
+ *
+ * @param chunkReader
+ * the chunk reader carrying the revision and the accessor
+ * @param chunks
+ * the chunks to fill, ordered by ascending start index
+ * @param where
+ * an optional additional WHERE condition, or <code>null</code>
+ */
+ public final void readChunks(IDBStoreChunkReader chunkReader, List<Chunk> chunks, String where)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading list chunk values for feature {0}.{1} of {2}", getContainingClass().getName(), //$NON-NLS-1$
+ getFeature().getName(), chunkReader.getRevision());
+ }
+
+ IPreparedStatementCache statementCache = chunkReader.getAccessor().getStatementCache();
+ PreparedStatement pstmt = null;
+ ResultSet resultSet = null;
+
+ // lazily created: only needed if at least one index is missing in this branch
+ IStoreChunkReader baseReader = null;
+ try
+ {
+ StringBuilder builder = new StringBuilder(sqlSelectChunksPrefix);
+ if (where != null)
+ {
+ builder.append(" AND "); //$NON-NLS-1$
+ builder.append(where);
+ }
+
+ builder.append(sqlOrderByIndex);
+
+ String sql = builder.toString();
+ pstmt = statementCache.getPreparedStatement(sql, ReuseProbability.LOW);
+ pstmt.setLong(1, CDOIDUtil.getLong(chunkReader.getRevision().getID()));
+ pstmt.setInt(2, chunkReader.getRevision().getBranch().getID());
+ pstmt.setInt(3, chunkReader.getRevision().getVersion());
+ pstmt.setInt(4, chunkReader.getRevision().getVersion());
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading Chunks: {0}", pstmt); //$NON-NLS-1$
+ }
+
+ resultSet = pstmt.executeQuery();
+
+ int nextDBIndex = Integer.MAX_VALUE; // next available DB index
+ if (resultSet.next())
+ {
+ nextDBIndex = resultSet.getInt(1);
+ }
+
+ for (Chunk chunk : chunks)
+ {
+ int startIndex = chunk.getStartIndex();
+ int missingValueStartIndex = -1; // -1 means: currently no open gap
+
+ for (int i = 0; i < chunk.size(); i++)
+ {
+ int nextListIndex = startIndex + i; // next expected list index
+
+ if (nextDBIndex == nextListIndex)
+ {
+ // DB value is available. check first if missing indexes were present before.
+ if (missingValueStartIndex != -1)
+ {
+ // read missing indexes from missingValueStartIndex to currentIndex
+ if (baseReader == null)
+ {
+ baseReader = createBaseChunkReader(chunkReader.getAccessor(), chunkReader.getRevision().getID(),
+ chunkReader.getRevision().getBranch().getID());
+ }
+ if (TRACER.isEnabled())
+ {
+ TRACER.format(
+ "Scheduling range {0}-{1} to be read from base revision", missingValueStartIndex, nextListIndex); //$NON-NLS-1$
+ }
+
+ baseReader.addRangedChunk(missingValueStartIndex, nextListIndex);
+
+ // reset missingValueStartIndex
+ missingValueStartIndex = -1;
+ }
+
+ // now read value and set to chunk
+ Object value = typeMapping.readValue(resultSet);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("ChunkReader read value for index {0} from result set: {1}", nextDBIndex, value); //$NON-NLS-1$
+ }
+ chunk.add(i, value);
+
+ // advance DB cursor and read next available index
+ if (resultSet.next())
+ {
+ nextDBIndex = resultSet.getInt(1);
+ }
+ else
+ {
+ // no more DB indexes available, but we have to continue checking for gaps, therefore set to MAX_VALUE
+ nextDBIndex = Integer.MAX_VALUE;
+ }
+ }
+ else
+ {
+ // gap between next DB index and next list index detected.
+ // skip until end of chunk or until DB value becomes available
+ if (missingValueStartIndex == -1)
+ {
+ missingValueStartIndex = nextListIndex;
+ }
+ }
+ }
+
+ // chunk complete. check for missing values at the end of the chunk.
+ if (missingValueStartIndex != -1)
+ {
+ // read missing indexes from missingValueStartIndex to last chunk index
+ if (baseReader == null)
+ {
+ baseReader = createBaseChunkReader(chunkReader.getAccessor(), chunkReader.getRevision().getID(),
+ chunkReader.getRevision().getBranch().getID());
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER
+ .format(
+ "Scheduling range {0}-{1} to be read from base revision", missingValueStartIndex, chunk.getStartIndex() + chunk.size()); //$NON-NLS-1$
+ }
+ baseReader.addRangedChunk(missingValueStartIndex, chunk.getStartIndex() + chunk.size());
+ }
+ }
+ }
+ catch (SQLException ex)
+ {
+ throw new DBException(ex);
+ }
+ finally
+ {
+ DBUtil.close(resultSet);
+ statementCache.releasePreparedStatement(pstmt);
+ }
+
+ // now read missing values from base revision.
+ if (baseReader != null)
+ {
+ List<Chunk> baseChunks = baseReader.executeRead();
+
+ // base chunks are sub-ranges of this reader's chunks, in the same order; walk both in parallel
+ Iterator<Chunk> thisIterator = chunks.iterator();
+ Chunk thisChunk = thisIterator.next();
+
+ for (Chunk baseChunk : baseChunks)
+ {
+ int baseStartIndex = baseChunk.getStartIndex();
+
+ while (baseStartIndex > thisChunk.getStartIndex() + thisChunk.size())
+ {
+ // advance thisChunk, because it does not match baseChunk
+ thisChunk = thisIterator.next();
+ }
+
+ // baseChunk now corresponds to thisChunk, but startIndex of baseChunk may be higher.
+ // therefore calculate offset
+ int offset = baseStartIndex - thisChunk.getStartIndex();
+
+ // and copy values.
+ for (int i = 0; i < baseChunk.size(); i++)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Copying base chunk reader value {0} at index {1} to current chunk reader at index {2}.",
+ baseChunk.get(i), baseChunk.getStartIndex() + i, thisChunk.getStartIndex() + i + offset);
+ }
+
+ thisChunk.add(i + offset, baseChunk.get(i));
+ } // finally, continue with the next baseChunk
+ }
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Reading list chunk values done for feature {0}.{1} of {2}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), chunkReader.getRevision());
+ }
+ }
+
+ /**
+ * Writes the complete list of the given revision to the list table, one entry per value, starting at index 0.
+ */
+ public void writeValues(IDBStoreAccessor accessor, InternalCDORevision revision)
+ {
+ CDOList values = revision.getList(getFeature());
+
+ for (int index = 0; index < values.size(); index++)
+ {
+ writeValue(accessor, revision, index, values.get(index));
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Writing done"); //$NON-NLS-1$
+ }
+ }
+
+ /**
+ * Writes one list value as a live entry (versionAdded = the revision's version, versionRemoved = NULL).
+ *
+ * @param accessor
+ * the store accessor
+ * @param revision
+ * the revision whose id, branch and version key the entry
+ * @param index
+ * the list index of the value
+ * @param value
+ * the value to write
+ */
+ protected final void writeValue(IDBStoreAccessor accessor, CDORevision revision, int index, Object value)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Writing value for feature {0}.{1} index {2} of {3} : {4}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, revision, value);
+ }
+
+ addEntry(accessor, revision.getID(), revision.getBranch().getID(), revision.getVersion(), index, value);
+ }
+
+ /**
+ * Clear a list of a given revision: marks all live entries of the branch as removed with {@code newVersion}.
+ * Before doing so, every index whose value only exists in the base branch (i.e. was never copied into this branch)
+ * is materialized as a historic entry, so the removal is recorded in this branch as well.
+ *
+ * @param accessor
+ * the accessor to use
+ * @param id
+ * the id of the revision from which to remove all items
+ * @param branchId
+ * the branch in which the list is cleared
+ * @param oldVersion
+ * the previous version (currently unused)
+ * @param newVersion
+ * the version in which the list is cleared ({@link #FINAL_VERSION} when the object is detached)
+ * @param lastIndex
+ * the index of the last list element (list size - 1)
+ */
+ public void clearList(IDBStoreAccessor accessor, CDOID id, int branchId, int oldVersion, int newVersion, int lastIndex)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmtClear = null;
+
+ try
+ {
+ // check for each index if the value exists in the current branch
+ for (int i = 0; i <= lastIndex; i++)
+ {
+ if (getValue(accessor, id, branchId, i, false) == null)
+ {
+ // if not, add a historic entry for missing ones.
+ addHistoricEntry(accessor, id, branchId, 0, newVersion, i, getValueFromBase(accessor, id, branchId, i));
+ }
+ }
+
+ // clear rest of the list
+ pstmtClear = statementCache.getPreparedStatement(sqlClearList, ReuseProbability.HIGH);
+ pstmtClear.setInt(1, newVersion);
+ pstmtClear.setLong(2, CDOIDUtil.getLong(id));
+ pstmtClear.setInt(3, branchId);
+
+ int result = CDODBUtil.sqlUpdate(pstmtClear, false);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("ClearList result: {0}", result); //$NON-NLS-1$
+ }
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmtClear);
+ }
+ }
+
+ /**
+ * Handles detachment of an object: clears its complete list in the transaction's branch, using
+ * {@link #FINAL_VERSION} as versionRemoved so the entries are gone for all subsequent versions.
+ *
+ * @param accessor
+ * the store accessor (its transaction provides the revision and the branch)
+ * @param id
+ * the id of the detached object
+ * @param revised
+ * the detach timestamp (not needed here - removal is recorded by version)
+ */
+ public void objectDetached(IDBStoreAccessor accessor, CDOID id, long revised)
+ {
+ InternalCDORevision revision = (InternalCDORevision)accessor.getTransaction().getRevision(id);
+ int branchId = accessor.getTransaction().getBranch().getID();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("objectDetached {0}", revision); //$NON-NLS-1$
+ }
+
+ clearList(accessor, id, branchId, revision.getVersion(), FINAL_VERSION, revision.getList(getFeature()).size() - 1);
+ }
+
+ /**
+ * Applies a list feature delta incrementally to the list table. The individual list changes are dispatched to a
+ * {@link ListDeltaVisitor}, which translates them into add/remove/move operations on the ranged entries. As an
+ * optimization, all changes before the last clear/unset are skipped, since the clear supersedes them.
+ *
+ * @param accessor
+ * the store accessor (its transaction provides the original revision)
+ * @param id
+ * the id of the changed object
+ * @param branchId
+ * the branch in which the change is committed
+ * @param oldVersion
+ * the version the delta is based on
+ * @param newVersion
+ * the version being created
+ * @param created
+ * the creation timestamp of the new revision (not used by this mapping)
+ * @param delta
+ * the list delta containing the individual changes
+ */
+ public void processDelta(final IDBStoreAccessor accessor, final CDOID id, final int branchId, final int oldVersion,
+ final int newVersion, long created, CDOListFeatureDelta delta)
+ {
+ List<CDOFeatureDelta> listChanges = delta.getListChanges();
+ if (listChanges.size() == 0)
+ {
+ // nothing to do.
+ return;
+ }
+
+ InternalCDORevision originalRevision = (InternalCDORevision)accessor.getTransaction().getRevision(id);
+ int oldListSize = originalRevision.getList(getFeature()).size();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("ListTableMapping.processDelta for revision {0} - previous list size: {1}", originalRevision, //$NON-NLS-1$
+ oldListSize);
+ }
+
+ // let the visitor collect the changes
+ ListDeltaVisitor visitor = new ListDeltaVisitor(accessor, originalRevision, branchId, oldVersion, newVersion);
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Processing deltas..."); //$NON-NLS-1$
+ }
+
+ // optimization: it's only necessary to process deltas
+ // starting with the last feature delta which clears the list
+ // (any operation before the clear is cascaded by it anyway)
+ int index = listChanges.size() - 1;
+ while (index > 0)
+ {
+ CDOFeatureDelta listDelta = listChanges.get(index);
+ if (listDelta instanceof CDOClearFeatureDelta || listDelta instanceof CDOUnsetFeatureDelta)
+ {
+ break;
+ }
+ index--;
+ }
+ while (index < listChanges.size())
+ {
+ listChanges.get(index++).accept(visitor);
+ }
+ }
+
+ /**
+ * Visitor that translates the individual {@link CDOFeatureDelta list deltas} into incremental operations on the
+ * ranged list table: removing marks/deletes entries, adding inserts live entries, and index shifts are applied by
+ * {@code moveOneUp}/{@code moveOneDown}, materializing base-branch values where necessary.
+ *
+ * @author Stefan Winkler
+ */
+ private class ListDeltaVisitor implements CDOFeatureDeltaVisitor
+ {
+ private final IDBStoreAccessor accessor;
+
+ private final CDOID id;
+
+ private final int branchID;
+
+ private final int oldVersion;
+
+ private final int newVersion;
+
+ // index of the last list element, updated as deltas are applied; -1 for an empty list
+ private int lastIndex;
+
+ public ListDeltaVisitor(IDBStoreAccessor accessor, InternalCDORevision originalRevision, int targetBranchID,
+ int oldVersion, int newVersion)
+ {
+ this.accessor = accessor;
+ id = originalRevision.getID();
+ branchID = targetBranchID;
+ this.oldVersion = oldVersion;
+ this.newVersion = newVersion;
+ lastIndex = originalRevision.getList(getFeature()).size() - 1;
+ }
+
+ public void visit(CDOMoveFeatureDelta delta)
+ {
+ int fromIdx = delta.getOldPosition();
+ int toIdx = delta.getNewPosition();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Moving: {0} to {1}", fromIdx, toIdx); //$NON-NLS-1$
+ }
+
+ Object value = getValue(accessor, id, branchID, fromIdx, true);
+
+ // remove the item
+ removeEntry(accessor, id, branchID, oldVersion, newVersion, fromIdx);
+
+ // adjust indexes and shift either up or down
+ if (fromIdx < toIdx)
+ {
+ moveOneUp(accessor, id, branchID, oldVersion, newVersion, fromIdx + 1, toIdx);
+ }
+ else
+ { // fromIdx > toIdx here
+ moveOneDown(accessor, id, branchID, oldVersion, newVersion, toIdx, fromIdx - 1);
+ }
+
+ // create the item
+ addEntry(accessor, id, branchID, newVersion, toIdx, value);
+ }
+
+ public void visit(CDOAddFeatureDelta delta)
+ {
+ int startIndex = delta.getIndex();
+ int endIndex = lastIndex;
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Adding at: {0}", startIndex); //$NON-NLS-1$
+ }
+
+ if (startIndex <= endIndex)
+ {
+ // make room for the new item
+ moveOneDown(accessor, id, branchID, oldVersion, newVersion, startIndex, endIndex);
+ }
+
+ // create the item
+ addEntry(accessor, id, branchID, newVersion, startIndex, delta.getValue());
+
+ ++lastIndex;
+ }
+
+ public void visit(CDORemoveFeatureDelta delta)
+ {
+ int startIndex = delta.getIndex();
+ int endIndex = lastIndex;
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Removing at: {0}", startIndex); //$NON-NLS-1$
+ }
+
+ // remove the item
+ removeEntry(accessor, id, branchID, oldVersion, newVersion, startIndex);
+
+ // make room for the new item
+ moveOneUp(accessor, id, branchID, oldVersion, newVersion, startIndex + 1, endIndex);
+
+ --lastIndex;
+ }
+
+ public void visit(CDOSetFeatureDelta delta)
+ {
+ int index = delta.getIndex();
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Setting at: {0}", index); //$NON-NLS-1$
+ }
+
+ // remove the item
+ removeEntry(accessor, id, branchID, oldVersion, newVersion, index);
+
+ // create the item
+ addEntry(accessor, id, branchID, newVersion, index, delta.getValue());
+ }
+
+ public void visit(CDOUnsetFeatureDelta delta)
+ {
+ if (delta.getFeature().isUnsettable())
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Unsetting"); //$NON-NLS-1$
+ }
+
+ clearList(accessor, id, branchID, oldVersion, newVersion, lastIndex);
+ lastIndex = -1;
+ }
+
+ public void visit(CDOListFeatureDelta delta)
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ public void visit(CDOClearFeatureDelta delta)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Delta Clearing"); //$NON-NLS-1$
+ }
+
+ clearList(accessor, id, branchID, oldVersion, newVersion, lastIndex);
+ lastIndex = -1;
+ }
+
+ public void visit(CDOContainerFeatureDelta delta)
+ {
+ throw new ImplementationError("Should not be called"); //$NON-NLS-1$
+ }
+
+ /**
+ * Shifts the entries in [startIndex, endIndex] one index down (index - 1). Entries created in the current
+ * version are shifted in place; older entries are closed (removed/materialized) and re-added at the new index.
+ */
+ private void moveOneUp(IDBStoreAccessor accessor, CDOID id, int branchId, int oldVersion, int newVersion,
+ int startIndex, int endIndex)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+
+ try
+ {
+ pstmt = statementCache.getPreparedStatement(sqlUpdateIndex, ReuseProbability.HIGH);
+
+ for (int index = startIndex; index <= endIndex; ++index)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp moving: {0} -> {1}", index, index - 1); //$NON-NLS-1$
+ }
+
+ int stmtIndex = 1;
+ pstmt.setInt(stmtIndex++, index - 1);
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, branchId);
+ pstmt.setInt(stmtIndex++, newVersion);
+ pstmt.setInt(stmtIndex++, index);
+
+ int result = CDODBUtil.sqlUpdate(pstmt, false);
+ switch (result)
+ {
+ case 1:
+ // entry for current revision was already present.
+ // index update succeeded.
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp updated: {0} -> {1}", index, index - 1); //$NON-NLS-1$
+ }
+
+ break;
+ // no entry for current revision there.
+ case 0:
+ Object value = getValue(accessor, id, branchId, index, false);
+
+ if (value != null)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp remove: {0}", index); //$NON-NLS-1$
+ }
+
+ removeEntry(accessor, id, branchId, oldVersion, newVersion, index);
+ }
+ else
+ {
+ // value only exists in the base branch: materialize it as a historic entry in this branch
+ value = getValueFromBase(accessor, id, branchId, index);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp add historic entry at: {0}", index); //$NON-NLS-1$
+ }
+
+ addHistoricEntry(accessor, id, branchId, 0, newVersion, index, value);
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp add: {0}", index - 1); //$NON-NLS-1$
+ }
+
+ addEntry(accessor, id, branchId, newVersion, index - 1, value);
+ break;
+ default:
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneUp Too many results: {0} -> {1}: {2}", index, index - 1, result); //$NON-NLS-1$
+ }
+
+ throw new DBException("Too many results"); //$NON-NLS-1$
+ }
+ }
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+
+ /**
+ * Shifts the entries in [startIndex, endIndex] one index up (index + 1), iterating from the end to avoid index
+ * collisions. Entries created in the current version are shifted in place; older entries are closed
+ * (removed/materialized) and re-added at the new index.
+ */
+ private void moveOneDown(IDBStoreAccessor accessor, CDOID id, int branchId, int oldVersion, int newVersion,
+ int startIndex, int endIndex)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+
+ try
+ {
+ pstmt = statementCache.getPreparedStatement(sqlUpdateIndex, ReuseProbability.HIGH);
+
+ for (int index = endIndex; index >= startIndex; --index)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown moving: {0} -> {1}", index, index + 1); //$NON-NLS-1$
+ }
+
+ int stmtIndex = 1;
+ pstmt.setInt(stmtIndex++, index + 1);
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, branchId);
+ pstmt.setInt(stmtIndex++, newVersion);
+ pstmt.setInt(stmtIndex++, index);
+
+ int result = CDODBUtil.sqlUpdate(pstmt, false);
+ switch (result)
+ {
+ case 1:
+ // entry for current revision was already present.
+ // index update succeeded.
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown updated: {0} -> {1}", index, index + 1); //$NON-NLS-1$
+ }
+
+ break;
+ case 0:
+ Object value = getValue(accessor, id, branchId, index, false);
+
+ if (value != null)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown remove: {0}", index); //$NON-NLS-1$
+ }
+
+ removeEntry(accessor, id, branchId, oldVersion, newVersion, index);
+ }
+ else
+ {
+ // value only exists in the base branch: materialize it as a historic entry in this branch
+ value = getValueFromBase(accessor, id, branchId, index);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown add historic entry at: {0}", index); //$NON-NLS-1$
+ }
+
+ addHistoricEntry(accessor, id, branchId, 0, newVersion, index, value);
+ }
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown add: {0}", index + 1); //$NON-NLS-1$
+ }
+
+ addEntry(accessor, id, branchId, newVersion, index + 1, value);
+ break;
+ default:
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("moveOneDown Too many results: {0} -> {1}: {2}", index, index + 1, result); //$NON-NLS-1$
+ }
+
+ throw new DBException("Too many results"); //$NON-NLS-1$
+ }
+ }
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+ }
+
+ /**
+ * Inserts a live list entry: versionAdded = the given version, versionRemoved = NULL.
+ */
+ private void addEntry(IDBStoreAccessor accessor, CDOID id, int branchId, int version, int index, Object value)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Adding value for feature {0}.{1} index {2} of {3}:{4}v{5} : {6}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, branchId, version, value);
+ }
+
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement stmt = null;
+
+ try
+ {
+ stmt = statementCache.getPreparedStatement(sqlInsertEntry, ReuseProbability.HIGH);
+
+ int column = 1;
+ stmt.setLong(column++, CDOIDUtil.getLong(id));
+ stmt.setInt(column++, branchId);
+ stmt.setInt(column++, version); // versionAdded
+ stmt.setNull(column++, DBType.INTEGER.getCode()); // versionRemoved
+ stmt.setInt(column++, index);
+ typeMapping.setValue(stmt, column++, value);
+
+ CDODBUtil.sqlUpdate(stmt, true);
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ catch (IllegalStateException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(stmt);
+ }
+ }
+
+ /**
+ * Inserts a historic (already closed) list entry with an explicit version range [versionAdded, versionRemoved).
+ * Used to materialize base-branch values in the current branch before recording their removal.
+ */
+ private void addHistoricEntry(IDBStoreAccessor accessor, CDOID id, int branchId, int versionAdded,
+ int versionRemoved, int index, Object value)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format(
+ "Adding historic value for feature {0}.{1} index {2} of {3}:{4}v{5}-v{6} : {7}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, branchId, versionAdded, versionRemoved,
+ value);
+ }
+
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement stmt = null;
+
+ try
+ {
+ stmt = statementCache.getPreparedStatement(sqlInsertEntry, ReuseProbability.HIGH);
+
+ int column = 1;
+ stmt.setLong(column++, CDOIDUtil.getLong(id));
+ stmt.setInt(column++, branchId);
+ stmt.setInt(column++, versionAdded); // versionAdded
+ stmt.setInt(column++, versionRemoved); // versionRemoved
+ stmt.setInt(column++, index);
+ typeMapping.setValue(stmt, column++, value);
+
+ CDODBUtil.sqlUpdate(stmt, true);
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ catch (IllegalStateException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(stmt);
+ }
+ }
+
+ /**
+ * Removes the list entry at the given index for the new version. Three cases, tried in order:
+ * <ol>
+ * <li>the entry was added in the version currently being written ("temporary") - delete the row outright;</li>
+ * <li>a live entry of an older version exists in this branch - mark it removed by setting versionRemoved;</li>
+ * <li>neither exists - the value only lives in the base branch, so copy it into this branch as a historic entry
+ * that is removed in the new version.</li>
+ * </ol>
+ */
+ private void removeEntry(IDBStoreAccessor accessor, CDOID id, int branchId, int oldVersion, int newVersion, int index)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Removing value for feature {0}.{1} index {2} of {3}:{4}v{5}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, branchId, newVersion);
+ }
+
+ try
+ {
+ // try to delete a temporary entry first
+ pstmt = statementCache.getPreparedStatement(sqlDeleteEntry, ReuseProbability.HIGH);
+
+ int stmtIndex = 1;
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, branchId);
+ pstmt.setInt(stmtIndex++, index);
+ pstmt.setInt(stmtIndex++, newVersion);
+
+ int result = CDODBUtil.sqlUpdate(pstmt, false);
+ if (result == 1)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("removeEntry deleted: {0}", index); //$NON-NLS-1$
+ }
+ }
+ else if (result > 1)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("removeEntry Too many results: {0}: {1}", index, result); //$NON-NLS-1$
+ }
+
+ throw new DBException("Too many results"); //$NON-NLS-1$
+ }
+ else
+ {
+ // no temporary entry found, so mark the entry as removed
+ statementCache.releasePreparedStatement(pstmt);
+ pstmt = statementCache.getPreparedStatement(sqlRemoveEntry, ReuseProbability.HIGH);
+
+ stmtIndex = 1;
+ pstmt.setInt(stmtIndex++, newVersion);
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, branchId);
+ pstmt.setInt(stmtIndex++, index);
+
+ result = CDODBUtil.sqlUpdate(pstmt, false);
+
+ if (result == 0)
+ {
+ // no entry removed -> this means that we are in a branch and
+ // the entry has not been modified since the branch fork.
+ // therefore, we have to copy the base value and mark it as removed
+ Object value = getValueFromBase(accessor, id, branchId, index);
+ addHistoricEntry(accessor, id, branchId, 0, newVersion, index, value);
+ }
+ }
+ }
+ catch (SQLException e)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Removing value for feature {0}.{1} index {2} of {3}:{4}v{5} FAILED {6}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, branchId, newVersion, e.getMessage());
+ }
+
+ throw new DBException(e);
+ }
+ catch (IllegalStateException e)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Removing value for feature {0}.{1} index {2} of {3}:{4}v{5} FAILED {6}", //$NON-NLS-1$
+ getContainingClass().getName(), getFeature().getName(), index, id, branchId, newVersion, e.getMessage());
+ }
+
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+ }
+
+ /**
+ * Read a single value from the current revision's list.
+ *
+ * @param accessor
+ * the store accessor
+ * @param id
+ * the revision's ID
+ * @param branchId
+ * the revision's branch ID
+ * @param index
+ * the index from which to get the value
+ * @param getFromBase
+ * if <code>true</code>, the value is recursively loaded from the base revision of a branch, if it is not
+ * present in the current branch (because it has not been changed since the branch fork). If
+ * <code>false</code>, <code>null</code> is returned in the former case.
+ */
+ private Object getValue(IDBStoreAccessor accessor, CDOID id, int branchId, int index, boolean getFromBase)
+ {
+ IPreparedStatementCache statementCache = accessor.getStatementCache();
+ PreparedStatement pstmt = null;
+ Object result = null;
+
+ try
+ {
+ pstmt = statementCache.getPreparedStatement(sqlGetValue, ReuseProbability.HIGH);
+ int stmtIndex = 1;
+ pstmt.setLong(stmtIndex++, CDOIDUtil.getLong(id));
+ pstmt.setInt(stmtIndex++, branchId);
+ pstmt.setInt(stmtIndex++, index);
+
+ ResultSet resultSet = pstmt.executeQuery();
+ if (resultSet.next())
+ {
+ result = typeMapping.readValue(resultSet);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Read value (index {0}) from result set: {1}", index, result); //$NON-NLS-1$
+ }
+ }
+ else
+ {
+ // value is not in this branch.
+ // -> read from base revision
+ if (getFromBase)
+ {
+ result = getValueFromBase(accessor, id, branchId, index);
+ } // else: result remains null
+ }
+ }
+ catch (SQLException e)
+ {
+ throw new DBException(e);
+ }
+ finally
+ {
+ statementCache.releasePreparedStatement(pstmt);
+ }
+
+ return result;
+ }
+
+ /**
+ * Read a single value (at a given index) from the base revision
+ *
+ * @param accessor
+ * the DBStoreAccessor
+ * @param id
+ * the ID of the revision
+ * @param branchID
+ * the ID of the current (child) branch
+ * @param index
+ * the index to read the value from
+ * @return the value which is at index <code>index</code> in revision with ID <code>id</code> in the parent branch at
+ * the base of this branch (indicated by <code>branchID</code>).
+ */
+ private Object getValueFromBase(IDBStoreAccessor accessor, CDOID id, int branchID, int index)
+ {
+ IStoreChunkReader chunkReader = createBaseChunkReader(accessor, id, branchID);
+ chunkReader.addSimpleChunk(index);
+ List<Chunk> chunks = chunkReader.executeRead();
+ return chunks.get(0).get(0);
+ }
+
+ private IStoreChunkReader createBaseChunkReader(IDBStoreAccessor accessor, CDOID id, int branchID)
+ {
+ CDOBranchPoint base = accessor.getStore().getRepository().getBranchManager().getBranch(branchID).getBase();
+ InternalCDORevision baseRevision = (InternalCDORevision)accessor.getStore().getRepository().getRevisionManager()
+ .getRevision(id, base, /* referenceChunk = */0, /* prefetchDepth = */CDORevision.DEPTH_NONE, true);
+ IStoreChunkReader chunkReader = accessor.createChunkReader(baseRevision, getFeature());
+ return chunkReader;
+ }
+
+ public final boolean queryXRefs(IDBStoreAccessor accessor, String mainTableName, String mainTableWhere,
+ QueryXRefsContext context, String idString)
+ {
+
+ String tableName = getTable().getName();
+ String listJoin = getMappingStrategy().getListJoin("a_t", "l_t");
+
+ StringBuilder builder = new StringBuilder();
+ builder.append("SELECT l_t."); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_REVISION_ID);
+ builder.append(", l_t."); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_VALUE);
+ builder.append(", l_t."); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_IDX);
+ builder.append(" FROM "); //$NON-NLS-1$
+ builder.append(tableName);
+ builder.append(" AS l_t, ");//$NON-NLS-1$
+ builder.append(mainTableName);
+ builder.append(" AS a_t WHERE ");//$NON-NLS-1$
+ builder.append("a_t." + mainTableWhere);//$NON-NLS-1$
+ builder.append(listJoin);
+ builder.append(" AND "); //$NON-NLS-1$
+ builder.append(CDODBSchema.LIST_VALUE);
+ builder.append(" IN "); //$NON-NLS-1$
+ builder.append(idString);
+ String sql = builder.toString();
+
+ ResultSet resultSet = null;
+ Statement stmt = null;
+
+ try
+ {
+ stmt = accessor.getConnection().createStatement();
+ if (TRACER.isEnabled())
+ {
+ TRACER.format("Query XRefs (list): {0}", sql);
+ }
+
+ resultSet = stmt.executeQuery(sql);
+ while (resultSet.next())
+ {
+ long idLong = resultSet.getLong(1);
+ CDOID srcId = CDOIDUtil.createLong(idLong);
+ idLong = resultSet.getLong(2);
+ CDOID targetId = CDOIDUtil.createLong(idLong);
+ int idx = resultSet.getInt(3);
+
+ boolean more = context.addXRef(targetId, srcId, (EReference)getFeature(), idx);
+ if (TRACER.isEnabled())
+ {
+ TRACER.format(" add XRef to context: src={0}, tgt={1}, idx={2}", srcId, targetId, idx);
+ }
+
+ if (!more)
+ {
+ if (TRACER.isEnabled())
+ {
+ TRACER.format(" result limit reached. Ignoring further results.");
+ }
+
+ return false;
+ }
+ }
+
+ return true;
+ }
+ catch (SQLException ex)
+ {
+ throw new DBException(ex);
+ }
+ finally
+ {
+ DBUtil.close(resultSet);
+ DBUtil.close(stmt);
+ }
+ }
+}
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditClassMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditClassMapping.java index f785e5154b..2c5d4e2ada 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditClassMapping.java +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditClassMapping.java @@ -10,6 +10,7 @@ * Stefan Winkler - major refactoring * Stefan Winkler - Bug 249610: [DB] Support external references (Implementation) * Lothar Werzinger - Bug 296440: [DB] Change RDB schema to improve scalability of to-many references in audit mode + * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy */ package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal; @@ -17,6 +18,7 @@ import org.eclipse.emf.cdo.common.branch.CDOBranch; import org.eclipse.emf.cdo.common.branch.CDOBranchPoint; import org.eclipse.emf.cdo.common.id.CDOID; import org.eclipse.emf.cdo.common.id.CDOIDUtil; +import org.eclipse.emf.cdo.common.revision.CDOList; import org.eclipse.emf.cdo.common.revision.CDORevision; import org.eclipse.emf.cdo.common.revision.delta.CDOAddFeatureDelta; import org.eclipse.emf.cdo.common.revision.delta.CDOClearFeatureDelta; @@ -96,6 +98,7 @@ public class HorizontalAuditClassMapping extends AbstractHorizontalClassMapping private void initSQLStrings() { Map<EStructuralFeature, String> unsettableFields = getUnsettableFields(); + Map<EStructuralFeature, String> listSizeFields = getListSizeFields(); // ----------- Select Revision --------------------------- StringBuilder builder = new StringBuilder(); @@ -128,6 +131,15 @@ public class HorizontalAuditClassMapping extends AbstractHorizontalClassMapping } } + if (listSizeFields != null) + { + for 
(String fieldName : listSizeFields.values()) + { + builder.append(", "); //$NON-NLS-1$ + builder.append(fieldName); + } + } + builder.append(" FROM "); //$NON-NLS-1$ builder.append(getTable()); builder.append(" WHERE "); //$NON-NLS-1$ @@ -196,6 +208,15 @@ public class HorizontalAuditClassMapping extends AbstractHorizontalClassMapping } } + if (listSizeFields != null) + { + for (String fieldName : listSizeFields.values()) + { + builder.append(", "); //$NON-NLS-1$ + builder.append(fieldName); + } + } + builder.append(") VALUES (?, ?, ?, ?, ?, ?, ?, ?"); //$NON-NLS-1$ for (int i = 0; i < getValueMappings().size(); i++) @@ -211,6 +232,14 @@ public class HorizontalAuditClassMapping extends AbstractHorizontalClassMapping } } + if (listSizeFields != null) + { + for (int i = 0; i < listSizeFields.size(); i++) + { + builder.append(", ?"); //$NON-NLS-1$ + } + } + builder.append(")"); //$NON-NLS-1$ sqlInsertAttributes = builder.toString(); @@ -452,6 +481,19 @@ public class HorizontalAuditClassMapping extends AbstractHorizontalClassMapping mapping.setValueFromRevision(stmt, col++, revision); } + Map<EStructuralFeature, String> listSizeFields = getListSizeFields(); + if (listSizeFields != null) + { + // isSetCol now points to the first listTableSize-column + col = isSetCol; + + for (EStructuralFeature feature : listSizeFields.keySet()) + { + CDOList list = revision.getList(feature); + stmt.setInt(col++, list.size()); + } + } + CDODBUtil.sqlUpdate(stmt, true); } catch (SQLException e) @@ -611,11 +653,14 @@ public class HorizontalAuditClassMapping extends AbstractHorizontalClassMapping private InternalCDORevision newRevision; + private int branchId; + public void process(IDBStoreAccessor accessor, InternalCDORevisionDelta delta, long created) { this.accessor = accessor; this.created = created; id = delta.getID(); + branchId = delta.getBranch().getID(); oldVersion = delta.getVersion(); if (TRACER.isEnabled()) @@ -667,8 +712,9 @@ public class HorizontalAuditClassMapping extends 
AbstractHorizontalClassMapping public void visit(CDOListFeatureDelta delta) { + delta.apply(newRevision); IListMappingDeltaSupport listMapping = (IListMappingDeltaSupport)getListMapping(delta.getFeature()); - listMapping.processDelta(accessor, id, oldVersion, oldVersion + 1, created, delta); + listMapping.processDelta(accessor, id, branchId, oldVersion, oldVersion + 1, created, delta); } public void visit(CDOClearFeatureDelta delta) diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditMappingStrategyWithRanges.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditMappingStrategyWithRanges.java index efcc187750..82d8cd0b63 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditMappingStrategyWithRanges.java +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalAuditMappingStrategyWithRanges.java @@ -8,11 +8,13 @@ * Contributors: * Eike Stepper - initial API and implementation * Stefan Winkler - major refactoring + * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy */ package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal; import org.eclipse.emf.cdo.server.db.mapping.IClassMapping; import org.eclipse.emf.cdo.server.db.mapping.IListMapping; +import org.eclipse.emf.cdo.server.internal.db.CDODBSchema; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.EStructuralFeature; @@ -63,7 +65,12 @@ public class HorizontalAuditMappingStrategyWithRanges extends AbstractHorizontal @Override public String getListJoin(String attrTable, String listTable) { - // TODO: implement HorizontalAuditMappingStrategyWithRanges.getListJoin(attrTable, listTable) - throw new UnsupportedOperationException(); + String join = super.getListJoin(attrTable, listTable); 
+ join += " AND " + listTable + "." + CDODBSchema.LIST_REVISION_VERSION_ADDED; + join += "<=" + attrTable + "." + CDODBSchema.ATTRIBUTES_VERSION; + join += " AND (" + listTable + "." + CDODBSchema.LIST_REVISION_VERSION_REMOVED; + join += " IS NULL OR " + listTable + "." + CDODBSchema.LIST_REVISION_VERSION_REMOVED; + join += ">" + attrTable + "." + CDODBSchema.ATTRIBUTES_VERSION + ")"; + return join; } } diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalBranchingClassMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalBranchingClassMapping.java index 767fb89394..92209ca79d 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalBranchingClassMapping.java +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalBranchingClassMapping.java @@ -10,6 +10,7 @@ * Stefan Winkler - major refactoring * Stefan Winkler - 249610: [DB] Support external references (Implementation) * Stefan Winkler - derived branch mapping from audit mapping + * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy */ package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal; @@ -19,9 +20,20 @@ import org.eclipse.emf.cdo.common.branch.CDOBranchPoint; import org.eclipse.emf.cdo.common.branch.CDOBranchVersion; import org.eclipse.emf.cdo.common.id.CDOID; import org.eclipse.emf.cdo.common.id.CDOIDUtil; +import org.eclipse.emf.cdo.common.model.CDOModelUtil; +import org.eclipse.emf.cdo.common.revision.CDOList; import org.eclipse.emf.cdo.common.revision.CDORevision; import org.eclipse.emf.cdo.common.revision.CDORevisionHandler; import org.eclipse.emf.cdo.common.revision.CDORevisionManager; +import org.eclipse.emf.cdo.common.revision.delta.CDOAddFeatureDelta; +import 
org.eclipse.emf.cdo.common.revision.delta.CDOClearFeatureDelta; +import org.eclipse.emf.cdo.common.revision.delta.CDOContainerFeatureDelta; +import org.eclipse.emf.cdo.common.revision.delta.CDOFeatureDeltaVisitor; +import org.eclipse.emf.cdo.common.revision.delta.CDOListFeatureDelta; +import org.eclipse.emf.cdo.common.revision.delta.CDOMoveFeatureDelta; +import org.eclipse.emf.cdo.common.revision.delta.CDORemoveFeatureDelta; +import org.eclipse.emf.cdo.common.revision.delta.CDOSetFeatureDelta; +import org.eclipse.emf.cdo.common.revision.delta.CDOUnsetFeatureDelta; import org.eclipse.emf.cdo.eresource.EresourcePackage; import org.eclipse.emf.cdo.server.IRepository; import org.eclipse.emf.cdo.server.IStoreAccessor.QueryXRefsContext; @@ -30,13 +42,17 @@ import org.eclipse.emf.cdo.server.db.IDBStoreAccessor; import org.eclipse.emf.cdo.server.db.IPreparedStatementCache; import org.eclipse.emf.cdo.server.db.IPreparedStatementCache.ReuseProbability; import org.eclipse.emf.cdo.server.db.mapping.IClassMappingAuditSupport; +import org.eclipse.emf.cdo.server.db.mapping.IClassMappingDeltaSupport; import org.eclipse.emf.cdo.server.db.mapping.IListMapping; +import org.eclipse.emf.cdo.server.db.mapping.IListMappingDeltaSupport; import org.eclipse.emf.cdo.server.db.mapping.ITypeMapping; import org.eclipse.emf.cdo.server.internal.db.CDODBSchema; import org.eclipse.emf.cdo.server.internal.db.DBStore; import org.eclipse.emf.cdo.server.internal.db.bundle.OM; import org.eclipse.emf.cdo.spi.common.commit.CDOChangeSetSegment; import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevision; +import org.eclipse.emf.cdo.spi.common.revision.InternalCDORevisionDelta; +import org.eclipse.emf.cdo.spi.server.InternalRepository; import org.eclipse.net4j.db.DBException; import org.eclipse.net4j.db.DBType; @@ -64,8 +80,100 @@ import java.util.Set; * @since 3.0 */ public class HorizontalBranchingClassMapping extends AbstractHorizontalClassMapping implements - IClassMappingAuditSupport + 
IClassMappingAuditSupport, IClassMappingDeltaSupport { + /** + * @author Stefan Winkler + */ + private class FeatureDeltaWriter implements CDOFeatureDeltaVisitor + { + private IDBStoreAccessor accessor; + + private long created; + + private CDOID id; + + private CDOBranch targetBranch; + + private int oldVersion; + + private int newVersion; + + private InternalCDORevision newRevision; + + public void process(IDBStoreAccessor accessor, InternalCDORevisionDelta delta, long created) + { + this.accessor = accessor; + this.created = created; + id = delta.getID(); + oldVersion = delta.getVersion(); + + if (TRACER.isEnabled()) + { + TRACER.format("FeatureDeltaWriter: old version: {0}, new version: {1}", oldVersion, oldVersion + 1); //$NON-NLS-1$ + } + + InternalCDORevision originalRevision = (InternalCDORevision)accessor.getTransaction().getRevision(id); + newRevision = originalRevision.copy(); + targetBranch = accessor.getTransaction().getBranch(); + newRevision.adjustForCommit(targetBranch, created); + + newVersion = newRevision.getVersion(); + + // process revision delta tree + delta.accept(this); + + if (newVersion != CDORevision.FIRST_VERSION) + { + reviseOldRevision(accessor, id, delta.getBranch(), newRevision.getTimeStamp() - 1); + } + + writeValues(accessor, newRevision); + } + + public void visit(CDOMoveFeatureDelta delta) + { + throw new ImplementationError("Should not be called"); //$NON-NLS-1$ + } + + public void visit(CDOAddFeatureDelta delta) + { + throw new ImplementationError("Should not be called"); //$NON-NLS-1$ + } + + public void visit(CDORemoveFeatureDelta delta) + { + throw new ImplementationError("Should not be called"); //$NON-NLS-1$ + } + + public void visit(CDOSetFeatureDelta delta) + { + delta.apply(newRevision); + } + + public void visit(CDOUnsetFeatureDelta delta) + { + delta.apply(newRevision); + } + + public void visit(CDOListFeatureDelta delta) + { + delta.apply(newRevision); + IListMappingDeltaSupport listMapping = 
(IListMappingDeltaSupport)getListMapping(delta.getFeature()); + listMapping.processDelta(accessor, id, targetBranch.getID(), oldVersion, newVersion, created, delta); + } + + public void visit(CDOClearFeatureDelta delta) + { + throw new ImplementationError("Should not be called"); //$NON-NLS-1$ + } + + public void visit(CDOContainerFeatureDelta delta) + { + delta.apply(newRevision); + } + } + private static final ContextTracer TRACER = new ContextTracer(OM.DEBUG, HorizontalBranchingClassMapping.class); private String sqlInsertAttributes; @@ -84,6 +192,15 @@ public class HorizontalBranchingClassMapping extends AbstractHorizontalClassMapp private String sqlSelectForChangeSet; + private ThreadLocal<FeatureDeltaWriter> deltaWriter = new ThreadLocal<FeatureDeltaWriter>() + { + @Override + protected FeatureDeltaWriter initialValue() + { + return new FeatureDeltaWriter(); + } + }; + public HorizontalBranchingClassMapping(AbstractHorizontalMappingStrategy mappingStrategy, EClass eClass) { super(mappingStrategy, eClass); @@ -100,6 +217,7 @@ public class HorizontalBranchingClassMapping extends AbstractHorizontalClassMapp private void initSQLStrings() { Map<EStructuralFeature, String> unsettableFields = getUnsettableFields(); + Map<EStructuralFeature, String> listSizeFields = getListSizeFields(); // ----------- Select Revision --------------------------- StringBuilder builder = new StringBuilder(); @@ -132,6 +250,15 @@ public class HorizontalBranchingClassMapping extends AbstractHorizontalClassMapp } } + if (listSizeFields != null) + { + for (String fieldName : listSizeFields.values()) + { + builder.append(", "); //$NON-NLS-1$ + builder.append(fieldName); + } + } + builder.append(" FROM "); //$NON-NLS-1$ builder.append(getTable()); builder.append(" WHERE "); //$NON-NLS-1$ @@ -204,6 +331,15 @@ public class HorizontalBranchingClassMapping extends AbstractHorizontalClassMapp } } + if (listSizeFields != null) + { + for (String fieldName : listSizeFields.values()) + { + 
builder.append(", "); //$NON-NLS-1$ + builder.append(fieldName); + } + } + builder.append(") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?"); //$NON-NLS-1$ for (int i = 0; i < getValueMappings().size(); i++) @@ -219,6 +355,14 @@ public class HorizontalBranchingClassMapping extends AbstractHorizontalClassMapp } } + if (listSizeFields != null) + { + for (int i = 0; i < listSizeFields.size(); i++) + { + builder.append(", ?"); //$NON-NLS-1$ + } + } + builder.append(")"); //$NON-NLS-1$ sqlInsertAttributes = builder.toString(); @@ -491,6 +635,19 @@ public class HorizontalBranchingClassMapping extends AbstractHorizontalClassMapp mapping.setValueFromRevision(stmt, col++, revision); } + Map<EStructuralFeature, String> listSizeFields = getListSizeFields(); + if (listSizeFields != null) + { + // isSetCol now points to the first listTableSize-column + col = isSetCol; + + for (EStructuralFeature feature : listSizeFields.keySet()) + { + CDOList list = revision.getList(feature); + stmt.setInt(col++, list.size()); + } + } + CDODBUtil.sqlUpdate(stmt, true); } catch (SQLException e) @@ -538,6 +695,18 @@ public class HorizontalBranchingClassMapping extends AbstractHorizontalClassMapp mapping.setDefaultValue(stmt, col++); } + Map<EStructuralFeature, String> listSizeFields = getListSizeFields(); + if (listSizeFields != null) + { + // list size columns begin after isSet-columns + col = isSetCol; + + for (int i = 0; i < listSizeFields.size(); i++) + { + stmt.setInt(col++, 0); + } + } + CDODBUtil.sqlUpdate(stmt, true); } catch (SQLException e) @@ -593,7 +762,7 @@ public class HorizontalBranchingClassMapping extends AbstractHorizontalClassMapp { // put new objects into objectTypeMapper long timeStamp = revision.getTimeStamp(); - HorizontalBranchingMappingStrategy mappingStrategy = (HorizontalBranchingMappingStrategy)getMappingStrategy(); + AbstractHorizontalMappingStrategy mappingStrategy = (AbstractHorizontalMappingStrategy)getMappingStrategy(); mappingStrategy.putObjectType(accessor, timeStamp, id, 
getEClass()); } else if (revision.getVersion() > CDOBranchVersion.FIRST_VERSION) @@ -873,4 +1042,77 @@ public class HorizontalBranchingClassMapping extends AbstractHorizontalClassMapp return builder.toString(); } + + public void writeRevisionDelta(IDBStoreAccessor accessor, InternalCDORevisionDelta delta, long created, + OMMonitor monitor) + { + monitor.begin(); + + try + { + if (accessor.getTransaction().getBranch() != delta.getBranch()) + { + // new branch -> decide, if branch should be copied + if (((HorizontalBranchingMappingStrategyWithRanges)getMappingStrategy()).shallCopyOnBranch()) + { + doCopyOnBranch(accessor, delta, created, monitor.fork()); + return; + } + } + + Async async = null; + + try + { + async = monitor.forkAsync(); + FeatureDeltaWriter writer = deltaWriter.get(); + writer.process(accessor, delta, created); + } + finally + { + if (async != null) + { + async.stop(); + } + } + } + finally + { + monitor.done(); + } + } + + private void doCopyOnBranch(IDBStoreAccessor accessor, InternalCDORevisionDelta delta, long created, OMMonitor monitor) + { + monitor.begin(2); + try + { + InternalRepository repository = (InternalRepository)accessor.getStore().getRepository(); + + InternalCDORevision oldRevision = (InternalCDORevision)accessor.getTransaction().getRevision(delta.getID()); + if (oldRevision == null) + { + throw new IllegalStateException("Origin revision not found for " + delta); + } + + // Make sure all chunks are loaded + for (EStructuralFeature feature : CDOModelUtil.getAllPersistentFeatures(oldRevision.getEClass())) + { + if (feature.isMany()) + { + repository.ensureChunk(oldRevision, feature, 0, oldRevision.getList(feature).size()); + } + } + + InternalCDORevision newRevision = oldRevision.copy(); + newRevision.adjustForCommit(accessor.getTransaction().getBranch(), created); + delta.apply(newRevision); + monitor.worked(); + writeRevision(accessor, newRevision, false, monitor.fork()); + } + finally + { + monitor.done(); + } + } } diff --git 
a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalBranchingMappingStrategyWithRanges.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalBranchingMappingStrategyWithRanges.java new file mode 100644 index 0000000000..c9c09660a5 --- /dev/null +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalBranchingMappingStrategyWithRanges.java @@ -0,0 +1,96 @@ +/**
+ * Copyright (c) 2004 - 2010 Eike Stepper (Berlin, Germany) and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Eike Stepper - initial API and implementation
+ * Stefan Winkler - major refactoring
+ * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy
+ */
+package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal;
+
+import org.eclipse.emf.cdo.server.db.mapping.IClassMapping;
+import org.eclipse.emf.cdo.server.db.mapping.IListMapping;
+import org.eclipse.emf.cdo.server.internal.db.CDODBSchema;
+
+import org.eclipse.emf.ecore.EClass;
+import org.eclipse.emf.ecore.EStructuralFeature;
+
+/**
+ * @author Eike Stepper
+ * @since 4.0
+ */
+public class HorizontalBranchingMappingStrategyWithRanges extends AbstractHorizontalMappingStrategy
+{
+ private static final String PROP_COPY_ON_BRANCH = "copyOnBranch";
+
+ private boolean copyOnBranch;
+
+ public HorizontalBranchingMappingStrategyWithRanges()
+ {
+ }
+
+ public boolean hasAuditSupport()
+ {
+ return true;
+ }
+
+ public boolean hasBranchingSupport()
+ {
+ return true;
+ }
+
+ public boolean hasDeltaSupport()
+ {
+ return true;
+ }
+
+ public boolean shallCopyOnBranch()
+ {
+ return copyOnBranch;
+ }
+
+ @Override
+ public IClassMapping doCreateClassMapping(EClass eClass)
+ {
+ return new HorizontalBranchingClassMapping(this, eClass);
+ }
+
+ @Override
+ public IListMapping doCreateListMapping(EClass containingClass, EStructuralFeature feature)
+ {
+ return new BranchingListTableMappingWithRanges(this, containingClass, feature);
+ }
+
+ @Override
+ public IListMapping doCreateFeatureMapMapping(EClass containingClass, EStructuralFeature feature)
+ {
+ return new BranchingFeatureMapTableMappingWithRanges(this, containingClass, feature);
+ }
+
+ @Override
+ public String getListJoin(String attrTable, String listTable)
+ {
+ String join = super.getListJoin(attrTable, listTable);
+ join += " AND " + listTable + "." + CDODBSchema.LIST_REVISION_VERSION_ADDED;
+ join += "<=" + attrTable + "." + CDODBSchema.ATTRIBUTES_VERSION;
+ join += " AND (" + listTable + "." + CDODBSchema.LIST_REVISION_VERSION_REMOVED;
+ join += " IS NULL OR " + listTable + "." + CDODBSchema.LIST_REVISION_VERSION_REMOVED;
+ join += ">" + attrTable + "." + CDODBSchema.ATTRIBUTES_VERSION;
+ join += ") AND " + attrTable + "." + CDODBSchema.ATTRIBUTES_BRANCH;
+ join += "=" + listTable + "." + CDODBSchema.LIST_REVISION_BRANCH;
+ return join;
+ }
+
+ @Override
+ protected void doAfterActivate() throws Exception
+ {
+ super.doAfterActivate();
+
+ String value = getProperties().get(PROP_COPY_ON_BRANCH);
+ copyOnBranch = value == null ? false : Boolean.valueOf(value);
+ }
+}
diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalNonAuditClassMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalNonAuditClassMapping.java index 8c5110324f..af16f15e53 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalNonAuditClassMapping.java +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/HorizontalNonAuditClassMapping.java @@ -9,6 +9,7 @@ * Eike Stepper - initial API and implementation * Stefan Winkler - major refactoring * Stefan Winkler - 249610: [DB] Support external references (Implementation) + * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy */ package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal; @@ -16,6 +17,7 @@ import org.eclipse.emf.cdo.common.branch.CDOBranch; import org.eclipse.emf.cdo.common.branch.CDOBranchPoint; import org.eclipse.emf.cdo.common.id.CDOID; import org.eclipse.emf.cdo.common.id.CDOIDUtil; +import org.eclipse.emf.cdo.common.revision.CDOList; import org.eclipse.emf.cdo.common.revision.CDORevision; import org.eclipse.emf.cdo.common.revision.delta.CDOAddFeatureDelta; import org.eclipse.emf.cdo.common.revision.delta.CDOClearFeatureDelta; @@ -97,6 +99,7 @@ public class HorizontalNonAuditClassMapping extends AbstractHorizontalClassMappi private void initSQLStrings() { Map<EStructuralFeature, String> unsettableFields = getUnsettableFields(); + Map<EStructuralFeature, String> listSizeFields = getListSizeFields(); // ----------- Select Revision --------------------------- StringBuilder builder = new StringBuilder(); @@ -129,6 +132,15 @@ public class HorizontalNonAuditClassMapping extends AbstractHorizontalClassMappi } } + if (listSizeFields != null) + { + for (String fieldName : listSizeFields.values()) + { + 
builder.append(", "); //$NON-NLS-1$ + builder.append(fieldName); + } + } + builder.append(" FROM "); //$NON-NLS-1$ builder.append(getTable()); builder.append(" WHERE "); //$NON-NLS-1$ @@ -174,6 +186,15 @@ public class HorizontalNonAuditClassMapping extends AbstractHorizontalClassMappi } } + if (listSizeFields != null) + { + for (String fieldName : listSizeFields.values()) + { + builder.append(", "); //$NON-NLS-1$ + builder.append(fieldName); + } + } + builder.append(") VALUES (?, ?, "); //$NON-NLS-1$ builder.append("?, ?, ?, ?, ?, ?"); //$NON-NLS-1$ for (int i = 0; i < getValueMappings().size(); i++) @@ -189,6 +210,14 @@ public class HorizontalNonAuditClassMapping extends AbstractHorizontalClassMappi } } + if (listSizeFields != null) + { + for (int i = 0; i < listSizeFields.size(); i++) + { + builder.append(", ?"); //$NON-NLS-1$ + } + } + builder.append(")"); //$NON-NLS-1$ sqlInsertAttributes = builder.toString(); @@ -268,6 +297,19 @@ public class HorizontalNonAuditClassMapping extends AbstractHorizontalClassMappi mapping.setValueFromRevision(stmt, col++, revision); } + Map<EStructuralFeature, String> listSizeFields = getListSizeFields(); + if (listSizeFields != null) + { + // isSetCol now points to the first listTableSize-column + col = isSetCol; + + for (EStructuralFeature feature : listSizeFields.keySet()) + { + CDOList list = revision.getList(feature); + stmt.setInt(col++, list.size()); + } + } + CDODBUtil.sqlUpdate(stmt, true); } catch (SQLException e) @@ -466,47 +508,55 @@ public class HorizontalNonAuditClassMapping extends AbstractHorizontalClassMappi private List<Pair<ITypeMapping, Object>> attributeChanges; + private List<Pair<EStructuralFeature, Integer>> listSizeChanges; + private int newContainingFeatureID; private CDOID newContainerID; private CDOID newResourceID; + private int branchId; + + private int newVersion; + + /* + * this is a temporary copy of the revision to track list size changes... 
+ */ + private InternalCDORevision tempRevision; + public FeatureDeltaWriter() { attributeChanges = new ArrayList<Pair<ITypeMapping, Object>>(); + listSizeChanges = new ArrayList<Pair<EStructuralFeature, Integer>>(); } protected void reset() { attributeChanges.clear(); + listSizeChanges.clear(); updateContainer = false; } public void process(IDBStoreAccessor a, CDORevisionDelta d, long c) { // set context - - reset(); id = d.getID(); + + branchId = d.getBranch().getID(); oldVersion = d.getVersion(); - int newVersion = oldVersion + 1; + newVersion = oldVersion + 1; created = c; accessor = a; + tempRevision = (InternalCDORevision)accessor.getTransaction().getRevision(id).copy(); + // process revision delta tree d.accept(this); - // update attributes - if (updateContainer) - { - updateAttributes(accessor, id, newVersion, created, newContainerID, newContainingFeatureID, newResourceID, - attributeChanges); - } - else - { - updateAttributes(accessor, id, newVersion, created, attributeChanges); - } + updateAttributes(); + // clean up + reset(); } public void visit(CDOMoveFeatureDelta delta) @@ -540,8 +590,19 @@ public class HorizontalNonAuditClassMapping extends AbstractHorizontalClassMappi public void visit(CDOListFeatureDelta delta) { - IListMappingDeltaSupport listMapping = (IListMappingDeltaSupport)getListMapping(delta.getFeature()); - listMapping.processDelta(accessor, id, oldVersion, oldVersion + 1, created, delta); + EStructuralFeature feature = delta.getFeature(); + + IListMappingDeltaSupport listMapping = (IListMappingDeltaSupport)getListMapping(feature); + listMapping.processDelta(accessor, id, branchId, oldVersion, oldVersion + 1, created, delta); + + int oldSize = tempRevision.getList(feature).size(); + delta.apply(tempRevision); + int newSize = tempRevision.getList(feature).size(); + + if (oldSize != newSize) + { + listSizeChanges.add(new Pair<EStructuralFeature, Integer>(feature, newSize)); + } } public void visit(CDOClearFeatureDelta delta) @@ -566,130 
+627,122 @@ public class HorizontalNonAuditClassMapping extends AbstractHorizontalClassMappi newResourceID = delta.getResourceID(); updateContainer = true; } - } - - public void updateAttributes(IDBStoreAccessor accessor, CDOID id, int newVersion, long created, CDOID newContainerId, - int newContainingFeatureId, CDOID newResourceId, List<Pair<ITypeMapping, Object>> attributeChanges) - { - IPreparedStatementCache statementCache = accessor.getStatementCache(); - PreparedStatement stmt = null; - try + private void updateAttributes() { - int col = 1; - stmt = statementCache.getPreparedStatement(buildUpdateStatement(attributeChanges, true), ReuseProbability.MEDIUM); - stmt.setInt(col++, newVersion); - stmt.setLong(col++, created); - stmt.setLong(col++, CDODBUtil.convertCDOIDToLong(getExternalReferenceManager(), accessor, newResourceId, created)); - stmt.setLong(col++, - CDODBUtil.convertCDOIDToLong(getExternalReferenceManager(), accessor, newContainerId, created)); - stmt.setInt(col++, newContainingFeatureId); + IPreparedStatementCache statementCache = accessor.getStatementCache(); + PreparedStatement stmt = null; - col = setUpdateAttributeValues(attributeChanges, stmt, col); + try + { + int col = 1; - stmt.setLong(col++, CDOIDUtil.getLong(id)); + stmt = statementCache.getPreparedStatement(buildUpdateStatement(), ReuseProbability.MEDIUM); + stmt.setInt(col++, newVersion); + stmt.setLong(col++, created); + if (updateContainer) + { + stmt.setLong(col++, + CDODBUtil.convertCDOIDToLong(getExternalReferenceManager(), accessor, newResourceID, created)); + stmt.setLong(col++, + CDODBUtil.convertCDOIDToLong(getExternalReferenceManager(), accessor, newContainerID, created)); + stmt.setInt(col++, newContainingFeatureID); + } - CDODBUtil.sqlUpdate(stmt, true); - } - catch (SQLException e) - { - throw new DBException(e); - } - finally - { - statementCache.releasePreparedStatement(stmt); - } - } + col = setUpdateAttributeValues(attributeChanges, stmt, col); + col = 
setUpdateListSizeChanges(listSizeChanges, stmt, col); - private int setUpdateAttributeValues(List<Pair<ITypeMapping, Object>> attributeChanges, PreparedStatement stmt, - int col) throws SQLException - { - for (Pair<ITypeMapping, Object> change : attributeChanges) - { - ITypeMapping typeMapping = change.getElement1(); - Object value = change.getElement2(); - if (typeMapping.getFeature().isUnsettable()) + stmt.setLong(col++, CDOIDUtil.getLong(id)); + + CDODBUtil.sqlUpdate(stmt, true); + } + catch (SQLException e) { - // feature is unsettable - if (value == null) - { - // feature is unset - typeMapping.setDefaultValue(stmt, col++); - stmt.setBoolean(col++, false); - } - else - { - // feature is set - typeMapping.setValue(stmt, col++, value); - stmt.setBoolean(col++, true); - } + throw new DBException(e); } - else + finally { - typeMapping.setValue(stmt, col++, change.getElement2()); + statementCache.releasePreparedStatement(stmt); } } - return col; - } - - public void updateAttributes(IDBStoreAccessor accessor, CDOID id, int newVersion, long created, - List<Pair<ITypeMapping, Object>> attributeChanges) - { - IPreparedStatementCache statementCache = accessor.getStatementCache(); - PreparedStatement stmt = null; - - try + private String buildUpdateStatement() { - stmt = statementCache - .getPreparedStatement(buildUpdateStatement(attributeChanges, false), ReuseProbability.MEDIUM); - - int col = 1; + StringBuilder builder = new StringBuilder(sqlUpdatePrefix); + if (updateContainer) + { + builder.append(sqlUpdateContainerPart); + } - stmt.setInt(col++, newVersion); - stmt.setLong(col++, created); + for (Pair<ITypeMapping, Object> change : attributeChanges) + { + builder.append(", "); //$NON-NLS-1$ + ITypeMapping typeMapping = change.getElement1(); + builder.append(typeMapping.getField()); + builder.append("=?"); //$NON-NLS-1$ - col = setUpdateAttributeValues(attributeChanges, stmt, col); + if (typeMapping.getFeature().isUnsettable()) + { + builder.append(", "); 
//$NON-NLS-1$ + builder.append(getUnsettableFields().get(typeMapping.getFeature())); + builder.append("=?"); //$NON-NLS-1$ + } + } - stmt.setLong(col++, CDOIDUtil.getLong(id)); + for (Pair<EStructuralFeature, Integer> change : listSizeChanges) + { + builder.append(", "); //$NON-NLS-1$ + EStructuralFeature feature = change.getElement1(); + builder.append(getListSizeFields().get(feature)); + builder.append("=?"); //$NON-NLS-1$ + } - CDODBUtil.sqlUpdate(stmt, true); - } - catch (SQLException e) - { - throw new DBException(e); + builder.append(sqlUpdateAffix); + return builder.toString(); } - finally - { - statementCache.releasePreparedStatement(stmt); - } - } - private String buildUpdateStatement(List<Pair<ITypeMapping, Object>> attributeChanges, boolean withContainment) - { - StringBuilder builder = new StringBuilder(sqlUpdatePrefix); - if (withContainment) + private int setUpdateAttributeValues(List<Pair<ITypeMapping, Object>> attributeChanges, PreparedStatement stmt, + int col) throws SQLException { - builder.append(sqlUpdateContainerPart); + for (Pair<ITypeMapping, Object> change : attributeChanges) + { + ITypeMapping typeMapping = change.getElement1(); + Object value = change.getElement2(); + if (typeMapping.getFeature().isUnsettable()) + { + // feature is unsettable + if (value == null) + { + // feature is unset + typeMapping.setDefaultValue(stmt, col++); + stmt.setBoolean(col++, false); + } + else + { + // feature is set + typeMapping.setValue(stmt, col++, value); + stmt.setBoolean(col++, true); + } + } + else + { + typeMapping.setValue(stmt, col++, change.getElement2()); + } + } + + return col; } - for (Pair<ITypeMapping, Object> change : attributeChanges) + private int setUpdateListSizeChanges(List<Pair<EStructuralFeature, Integer>> attributeChanges, + PreparedStatement stmt, int col) throws SQLException { - builder.append(", "); //$NON-NLS-1$ - ITypeMapping typeMapping = change.getElement1(); - builder.append(typeMapping.getField()); - builder.append("=?"); 
//$NON-NLS-1$ - - if (typeMapping.getFeature().isUnsettable()) + for (Pair<EStructuralFeature, Integer> change : listSizeChanges) { - builder.append(", "); //$NON-NLS-1$ - builder.append(getUnsettableFields().get(typeMapping.getFeature())); - builder.append("=?"); //$NON-NLS-1$ + stmt.setInt(col++, change.getElement2()); } - } - builder.append(sqlUpdateAffix); - return builder.toString(); + return col; + } } @Override diff --git a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/NonAuditFeatureMapTableMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/NonAuditFeatureMapTableMapping.java index fec311a9cb..7add5d558e 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/NonAuditFeatureMapTableMapping.java +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/NonAuditFeatureMapTableMapping.java @@ -9,6 +9,7 @@ * Eike Stepper - initial API and implementation * Stefan Winkler - 271444: [DB] Multiple refactorings bug 271444 * Christopher Albert - 254455: [DB] Support FeatureMaps bug 254455 + * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy */ package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal; @@ -513,8 +514,8 @@ public class NonAuditFeatureMapTableMapping extends AbstractFeatureMapTableMappi } } - public void processDelta(final IDBStoreAccessor accessor, final CDOID id, int oldVersion, final int newVersion, - long created, CDOListFeatureDelta listDelta) + public void processDelta(final IDBStoreAccessor accessor, final CDOID id, final int branchId, int oldVersion, + final int newVersion, long created, CDOListFeatureDelta listDelta) { CDOFeatureDeltaVisitor visitor = new CDOFeatureDeltaVisitor() { diff --git 
a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/NonAuditListTableMapping.java b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/NonAuditListTableMapping.java index 08f274c5fc..87844728c4 100644 --- a/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/NonAuditListTableMapping.java +++ b/plugins/org.eclipse.emf.cdo.server.db/src/org/eclipse/emf/cdo/server/internal/db/mapping/horizontal/NonAuditListTableMapping.java @@ -8,6 +8,7 @@ * Contributors: * Eike Stepper - initial API and implementation * Stefan Winkler - 271444: [DB] Multiple refactorings bug 271444 + * Stefan Winkler - Bug 329025: [DB] Support branching for range-based mapping strategy */ package org.eclipse.emf.cdo.server.internal.db.mapping.horizontal; @@ -195,8 +196,8 @@ public class NonAuditListTableMapping extends AbstractListTableMapping implement } } - public void processDelta(final IDBStoreAccessor accessor, final CDOID id, int oldVersion, final int newVersion, - long created, CDOListFeatureDelta delta) + public void processDelta(final IDBStoreAccessor accessor, final CDOID id, int branchId, int oldVersion, + final int newVersion, long created, CDOListFeatureDelta delta) { CDOBranchPoint main = accessor.getStore().getRepository().getBranchManager().getMainBranch().getHead(); diff --git a/plugins/org.eclipse.emf.cdo.tests.db/CDO AllTests (H2 branching).launch b/plugins/org.eclipse.emf.cdo.tests.db/CDO AllTests (H2 branching).launch index 9f0aede20b..fd724c9b09 100644 --- a/plugins/org.eclipse.emf.cdo.tests.db/CDO AllTests (H2 branching).launch +++ b/plugins/org.eclipse.emf.cdo.tests.db/CDO AllTests (H2 branching).launch @@ -12,5 +12,5 @@ <stringAttribute key="org.eclipse.jdt.junit.TEST_KIND" value="org.eclipse.jdt.junit.loader.junit3"/> <stringAttribute key="org.eclipse.jdt.launching.MAIN_TYPE" 
value="org.eclipse.emf.cdo.tests.db.AllTestsDBH2Branching"/> <stringAttribute key="org.eclipse.jdt.launching.PROJECT_ATTR" value="org.eclipse.emf.cdo.tests.db"/> -<stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-Xms40m -Xmx512m"/> +<stringAttribute key="org.eclipse.jdt.launching.VM_ARGUMENTS" value="-Xms40m -Xmx1024m"/> </launchConfiguration> diff --git a/plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2.java b/plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2.java index e6387ccc74..a665b2e61c 100644 --- a/plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2.java +++ b/plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2.java @@ -47,7 +47,7 @@ public class AllTestsDBH2 extends DBConfigs @Override protected void initConfigSuites(TestSuite parent) { - addScenario(parent, COMBINED, AllTestsDBH2.H2.ReusableFolder.AUDIT_INSTANCE, JVM, NATIVE); + //addScenario(parent, COMBINED, AllTestsDBH2.H2.ReusableFolder.AUDIT_INSTANCE, JVM, NATIVE); addScenario(parent, COMBINED, AllTestsDBH2.H2.ReusableFolder.RANGE_INSTANCE, JVM, NATIVE); } diff --git a/plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2Branching.java b/plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2Branching.java index a90c7d7961..63c928c0b8 100644 --- a/plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2Branching.java +++ b/plugins/org.eclipse.emf.cdo.tests.db/src/org/eclipse/emf/cdo/tests/db/AllTestsDBH2Branching.java @@ -11,7 +11,6 @@ package org.eclipse.emf.cdo.tests.db; import org.eclipse.emf.cdo.server.IRepository; -import org.eclipse.emf.cdo.server.db.CDODBUtil; import org.eclipse.emf.cdo.server.db.mapping.IMappingStrategy; import org.eclipse.emf.cdo.tests.BranchingTest; import org.eclipse.emf.cdo.tests.BranchingTestSameSession; @@ -33,6 +32,7 @@ import java.io.File; 
import java.sql.Connection; import java.sql.Statement; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -53,6 +53,8 @@ public class AllTestsDBH2Branching extends DBConfigs protected void initConfigSuites(TestSuite parent) { addScenario(parent, COMBINED, H2Branching.ReusableFolder.INSTANCE, JVM, NATIVE); + addScenario(parent, COMBINED, H2Branching.ReusableFolder.RANGE_INSTANCE, JVM, NATIVE); + addScenario(parent, COMBINED, H2Branching.ReusableFolder.CopyOnBranch.INSTANCE, JVM, NATIVE); } @Override @@ -85,13 +87,17 @@ public class AllTestsDBH2Branching extends DBConfigs { private static final long serialVersionUID = 1L; - public static final H2Branching INSTANCE = new H2Branching("DBStore: H2 (branching)"); + public static final H2Branching INSTANCE = new H2Branching("DBStore: H2 (branching)", + "org.eclipse.emf.cdo.server.internal.db.mapping.horizontal.HorizontalBranchingMappingStrategy"); protected transient File dbFolder; - public H2Branching(String name) + private String mappingStrategy; + + public H2Branching(String name, String mappingStrategy) { super(name); + this.mappingStrategy = mappingStrategy; } @Override @@ -102,10 +108,19 @@ public class AllTestsDBH2Branching extends DBConfigs props.put(IRepository.Props.SUPPORTING_BRANCHES, "true"); } + @SuppressWarnings("unchecked") @Override protected IMappingStrategy createMappingStrategy() { - return CDODBUtil.createHorizontalMappingStrategy(true, true); + try + { + Class<IMappingStrategy> clazz = (Class<IMappingStrategy>)Class.forName(mappingStrategy); + return clazz.newInstance(); + } + catch (Exception ex) + { + throw WrappedException.wrap(ex); + } } @Override @@ -125,6 +140,7 @@ public class AllTestsDBH2Branching extends DBConfigs JdbcDataSource dataSource = new JdbcDataSource(); dataSource.setURL("jdbc:h2:" + dbFolder.getAbsolutePath() + "/h2test;SCHEMA=" + repoName); + return dataSource; } @@ -145,7 +161,12 @@ public class AllTestsDBH2Branching extends 
DBConfigs { private static final long serialVersionUID = 1L; - public static final ReusableFolder INSTANCE = new ReusableFolder("DBStore: H2 (branching)"); + public static final ReusableFolder INSTANCE = new ReusableFolder("DBStore: H2 (branching)", + "org.eclipse.emf.cdo.server.internal.db.mapping.horizontal.HorizontalBranchingMappingStrategy"); + + public static final ReusableFolder RANGE_INSTANCE = new ReusableFolder( + "DBStore: H2 (Reusable Folder, branching, range-based mapping strategy)", + "org.eclipse.emf.cdo.server.internal.db.mapping.horizontal.HorizontalBranchingMappingStrategyWithRanges"); private static File reusableFolder; @@ -153,9 +174,9 @@ public class AllTestsDBH2Branching extends DBConfigs private transient ArrayList<String> repoNames = new ArrayList<String>(); - public ReusableFolder(String name) + public ReusableFolder(String name, String mappingStrategy) { - super(name); + super(name, mappingStrategy); } @Override @@ -188,6 +209,15 @@ public class AllTestsDBH2Branching extends DBConfigs } stmt.execute("CREATE SCHEMA IF NOT EXISTS " + repoName); + + /* + * final WebServer webServer = new WebServer(); webServer.init(new String[] { "-webPort", "7778" }); + * webServer.start(); System.out.println("----------------------------------"); + * System.out.println("----------------------------------"); System.out.println(webServer.addSession(conn)); + * System.out.println("----------------------------------"); + * System.out.println("----------------------------------"); new Thread() { + * @Override public void run() { webServer.listen(); } }.start(); + */ } catch (Exception ex) { @@ -195,7 +225,6 @@ public class AllTestsDBH2Branching extends DBConfigs } finally { - DBUtil.close(conn); DBUtil.close(stmt); } @@ -235,6 +264,32 @@ public class AllTestsDBH2Branching extends DBConfigs DBUtil.close(connection); } } + + public static class CopyOnBranch extends ReusableFolder + { + private static final long serialVersionUID = 1L; + + public static final 
ReusableFolder INSTANCE = new CopyOnBranch( + "DBStore: H2 (Reusable Folder, branching, range-based mapping strategy copyOnBranch)", + "org.eclipse.emf.cdo.server.internal.db.mapping.horizontal.HorizontalBranchingMappingStrategyWithRanges"); + + public CopyOnBranch(String name, String ms) + { + super(name, ms); + } + + @Override + protected IMappingStrategy createMappingStrategy() + { + IMappingStrategy ms = super.createMappingStrategy(); + + Map<String, String> properties = new HashMap<String, String>(); + properties.put("copyOnBranch", "true"); + ms.setProperties(properties); + + return ms; + } + } } } } diff --git a/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/AllConfigs.java b/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/AllConfigs.java index 736a570d19..3b49914eb3 100644 --- a/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/AllConfigs.java +++ b/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/AllConfigs.java @@ -128,6 +128,7 @@ public abstract class AllConfigs extends ConfigTestSuite testClasses.add(RevisionManagerTestClientSide.class); testClasses.add(BranchingTest.class); testClasses.add(BranchingTestSameSession.class); + testClasses.add(BranchingTestWithCacheClear.class); testClasses.add(MergingTest.class); testClasses.add(ViewTest.class); testClasses.add(TransactionTest.class); @@ -147,6 +148,8 @@ public abstract class AllConfigs extends ConfigTestSuite testClasses.add(PartialCommitTest.class); testClasses.add(MetaTest.class); testClasses.add(RevisionDeltaTest.class); + testClasses.add(RevisionDeltaInBranchTest.class); + testClasses.add(RevisionDeltaCascadingBranchesTest.class); testClasses.add(IndexReconstructionTest.class); testClasses.add(AutoAttacherTest.class); testClasses.add(SavePointTest.class); diff --git a/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/BranchingTest.java b/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/BranchingTest.java index 
7724304f75..423db2b512 100644 --- a/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/BranchingTest.java +++ b/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/BranchingTest.java @@ -385,6 +385,148 @@ public class BranchingTest extends AbstractCDOTest session.close(); } + public void testCommitAddOrderDetail() throws Exception + { + CDOSession session = openSession1(); + CDOBranchManager branchManager = session.getBranchManager(); + + // Commit to main branch + CDOBranch mainBranch = branchManager.getMainBranch(); + CDOTransaction transaction = session.openTransaction(mainBranch); + assertEquals(mainBranch, transaction.getBranch()); + assertEquals(CDOBranchPoint.UNSPECIFIED_DATE, transaction.getTimeStamp()); + + Product1 product = getModel1Factory().createProduct1(); + product.setName("CDO"); + + OrderDetail orderDetail = getModel1Factory().createOrderDetail(); + orderDetail.setProduct(product); + orderDetail.setPrice(5); + + CDOResource resource = transaction.createResource("/res"); + resource.getContents().add(product); + resource.getContents().add(orderDetail); + + CDOCommitInfo commitInfo = transaction.commit(); + dumpAll(session); + assertEquals(mainBranch, commitInfo.getBranch()); + long commitTime1 = commitInfo.getTimeStamp(); + transaction.close(); + + // Commit to sub branch + CDOBranch subBranch = mainBranch.createBranch("subBranch", commitTime1); + transaction = session.openTransaction(subBranch); + assertEquals(subBranch, transaction.getBranch()); + assertEquals(CDOBranchPoint.UNSPECIFIED_DATE, transaction.getTimeStamp()); + + resource = transaction.getResource("/res"); + orderDetail = (OrderDetail)resource.getContents().get(1); + assertEquals(5.0f, orderDetail.getPrice()); + product = orderDetail.getProduct(); + assertEquals("CDO", product.getName()); + + // Modify + OrderDetail orderDetail2 = getModel1Factory().createOrderDetail(); + orderDetail2.setProduct(product); + orderDetail2.setPrice(10); + 
resource.getContents().add(0, orderDetail2); + + commitInfo = transaction.commit(); + dumpAll(session); + assertEquals(subBranch, commitInfo.getBranch()); + long commitTime2 = commitInfo.getTimeStamp(); + + transaction.close(); + closeSession1(); + + session = openSession2(); + branchManager = session.getBranchManager(); + mainBranch = branchManager.getMainBranch(); + subBranch = mainBranch.getBranch("subBranch"); + + check(session, mainBranch, commitTime1, 5, "CDO"); + check(session, mainBranch, commitTime2, 5, "CDO"); + check(session, mainBranch, CDOBranchPoint.UNSPECIFIED_DATE, 5, "CDO"); + + check(session, subBranch, commitTime1, 5, "CDO"); + check(session, subBranch, commitTime2, 5, 10, "CDO"); + check(session, subBranch, CDOBranchPoint.UNSPECIFIED_DATE, 5, 10, "CDO"); + + session.close(); + } + + public void testCommitRemoveOrderDetail() throws Exception + { + CDOSession session = openSession1(); + CDOBranchManager branchManager = session.getBranchManager(); + + // Commit to main branch + CDOBranch mainBranch = branchManager.getMainBranch(); + CDOTransaction transaction = session.openTransaction(mainBranch); + assertEquals(mainBranch, transaction.getBranch()); + assertEquals(CDOBranchPoint.UNSPECIFIED_DATE, transaction.getTimeStamp()); + + Product1 product = getModel1Factory().createProduct1(); + product.setName("CDO"); + + OrderDetail orderDetail = getModel1Factory().createOrderDetail(); + orderDetail.setProduct(product); + orderDetail.setPrice(5); + + OrderDetail orderDetail2 = getModel1Factory().createOrderDetail(); + orderDetail2.setProduct(product); + orderDetail2.setPrice(10); + + CDOResource resource = transaction.createResource("/res"); + resource.getContents().add(orderDetail2); + resource.getContents().add(product); + resource.getContents().add(orderDetail); + + CDOCommitInfo commitInfo = transaction.commit(); + dumpAll(session); + assertEquals(mainBranch, commitInfo.getBranch()); + long commitTime1 = commitInfo.getTimeStamp(); + 
transaction.close(); + + // Commit to sub branch + CDOBranch subBranch = mainBranch.createBranch("subBranch", commitTime1); + transaction = session.openTransaction(subBranch); + assertEquals(subBranch, transaction.getBranch()); + assertEquals(CDOBranchPoint.UNSPECIFIED_DATE, transaction.getTimeStamp()); + + resource = transaction.getResource("/res"); + orderDetail = (OrderDetail)resource.getContents().get(2); + assertEquals(5.0f, orderDetail.getPrice()); + product = orderDetail.getProduct(); + assertEquals("CDO", product.getName()); + + // Modify + resource.getContents().remove(product.getOrderDetails().remove(1)); + + commitInfo = transaction.commit(); + dumpAll(session); + assertEquals(subBranch, commitInfo.getBranch()); + long commitTime2 = commitInfo.getTimeStamp(); + + transaction.close(); + closeSession1(); + + session = openSession2(); + branchManager = session.getBranchManager(); + mainBranch = branchManager.getMainBranch(); + subBranch = mainBranch.getBranch("subBranch"); + + check(session, mainBranch, commitTime1, 5, 10, "CDO"); + check(session, mainBranch, commitTime2, 5, 10, "CDO"); + check(session, mainBranch, CDOBranchPoint.UNSPECIFIED_DATE, 5, 10, "CDO"); + + check(session, subBranch, commitTime1, 5, 10, "CDO"); + check(session, subBranch, commitTime2, 5, "CDO"); + check(session, subBranch, CDOBranchPoint.UNSPECIFIED_DATE, 5, "CDO"); + + session.close(); + } + public void _testDetachExisting() throws Exception { CDOSession session = openSession1(); @@ -519,6 +661,7 @@ public class BranchingTest extends AbstractCDOTest { CDOView view = session.openView(branch, timeStamp); CDOResource resource = view.getResource("/res"); + assertEquals(2, resource.getContents().size()); dumpAll(session); OrderDetail orderDetail = (OrderDetail)resource.getContents().get(1); @@ -533,6 +676,29 @@ public class BranchingTest extends AbstractCDOTest view.close(); } + private void check(CDOSession session, CDOBranch branch, long timeStamp, float price, float price2, String 
name) + { + CDOView view = session.openView(branch, timeStamp); + CDOResource resource = view.getResource("/res"); + assertEquals(3, resource.getContents().size()); + + dumpAll(session); + OrderDetail orderDetail2 = (OrderDetail)resource.getContents().get(0); + OrderDetail orderDetail = (OrderDetail)resource.getContents().get(2); + + dumpAll(session); + assertEquals(price, orderDetail.getPrice()); + assertEquals(price2, orderDetail2.getPrice()); + + Product1 product = orderDetail.getProduct(); + Product1 product2 = orderDetail2.getProduct(); + dumpAll(session); + assertEquals(name, product.getName()); + assertEquals(name, product2.getName()); + + view.close(); + } + public void testDetachWithoutRevision() throws Exception { CDOSession session = openSession1(); diff --git a/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/BranchingTestWithCacheClear.java b/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/BranchingTestWithCacheClear.java new file mode 100644 index 0000000000..d74b0fbc25 --- /dev/null +++ b/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/BranchingTestWithCacheClear.java @@ -0,0 +1,15 @@ +package org.eclipse.emf.cdo.tests;
+
+
+/**
+ * @author Eike Stepper
+ */
+public class BranchingTestWithCacheClear extends BranchingTest
+{
+ @Override
+ protected void closeSession1()
+ {
+ super.closeSession1();
+ clearCache(getRepository().getRevisionManager());
+ }
+}
diff --git a/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/RevisionDeltaCascadingBranchesTest.java b/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/RevisionDeltaCascadingBranchesTest.java new file mode 100644 index 0000000000..7abb0baccf --- /dev/null +++ b/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/RevisionDeltaCascadingBranchesTest.java @@ -0,0 +1,299 @@ +/**
+ * Copyright (c) 2004 - 2010 Eike Stepper (Berlin, Germany) and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Eike Stepper - initial API and implementation
+ */
+package org.eclipse.emf.cdo.tests;
+
+import org.eclipse.emf.cdo.common.branch.CDOBranch;
+import org.eclipse.emf.cdo.common.commit.CDOCommitInfo;
+import org.eclipse.emf.cdo.eresource.CDOResource;
+import org.eclipse.emf.cdo.session.CDOSession;
+import org.eclipse.emf.cdo.tests.model1.Company;
+import org.eclipse.emf.cdo.transaction.CDOTransaction;
+import org.eclipse.emf.cdo.util.CommitException;
+import org.eclipse.emf.cdo.view.CDOView;
+
+import org.eclipse.net4j.util.WrappedException;
+
+import org.eclipse.emf.common.util.BasicEList;
+import org.eclipse.emf.common.util.EList;
+
+/**
+ * @author Eike Stepper
+ */
+public class RevisionDeltaCascadingBranchesTest extends RevisionDeltaTest
+{
+ @Override
+ protected void testStoreDelta(ListManipulator manipulator)
+ {
+ BasicEList<Company> referenceSubSub = new BasicEList<Company>();
+ BasicEList<Company> referenceSub = new BasicEList<Company>();
+ BasicEList<Company> referenceMain = new BasicEList<Company>();
+
+ long timestampBaseSubBranch = 0L;
+ long timestampBaseSubSubBranch = 0L;
+ int subBranchID = 0;
+ int subsubBranchID = 0;
+
+ // main branch
+ {
+ CDOSession session = openSession();
+ CDOBranch mainBranch = session.getBranchManager().getMainBranch();
+
+ CDOTransaction transaction = session.openTransaction();
+ CDOResource resource = transaction.createResource("/test1");
+
+ addCompaniesToList(resource.getContents(), 0, 10);
+ addCompaniesToList(referenceMain, 0, 10);
+ addCompaniesToList(referenceSub, 0, 10);
+ addCompaniesToList(referenceSubSub, 0, 10);
+
+ try
+ {
+ CDOCommitInfo info = transaction.commit();
+ assertEquals(mainBranch, info.getBranch());
+ timestampBaseSubBranch = info.getTimeStamp();
+ }
+ catch (CommitException ex)
+ {
+ throw WrappedException.wrap(ex);
+ }
+
+ transaction.close();
+ session.close();
+ }
+
+ clearCache(getRepository().getRevisionManager());
+
+ // main branch - second batch
+ {
+ CDOSession session = openSession();
+ CDOBranch mainBranch = session.getBranchManager().getMainBranch();
+
+ CDOTransaction transaction = session.openTransaction();
+ CDOResource resource = transaction.getResource("/test1");
+
+ addCompaniesToList(resource.getContents(), 10, 15);
+ addCompaniesToList(referenceMain, 10, 15);
+
+ try
+ {
+ CDOCommitInfo info = transaction.commit();
+ assertEquals(mainBranch, info.getBranch());
+ }
+ catch (CommitException ex)
+ {
+ throw WrappedException.wrap(ex);
+ }
+
+ transaction.close();
+ session.close();
+ }
+
+ clearCache(getRepository().getRevisionManager());
+
+ // sub branch - second batch
+ {
+ CDOSession session = openSession();
+ CDOBranch mainBranch = session.getBranchManager().getMainBranch();
+ CDOBranch subBranch = mainBranch.createBranch("subBranch", timestampBaseSubBranch);
+ subBranchID = subBranch.getID();
+
+ CDOTransaction transaction = session.openTransaction(subBranch);
+ CDOResource resource = transaction.getResource("/test1");
+
+ addCompaniesToList(resource.getContents(), 10, 15);
+ addCompaniesToList(referenceSub, 10, 15);
+ addCompaniesToList(referenceSubSub, 10, 15);
+
+ try
+ {
+ CDOCommitInfo info = transaction.commit();
+ assertEquals(subBranch, info.getBranch());
+ timestampBaseSubSubBranch = info.getTimeStamp();
+ }
+ catch (CommitException ex)
+ {
+ throw WrappedException.wrap(ex);
+ }
+
+ transaction.close();
+ session.close();
+ }
+
+ // sub branch - third batch
+ {
+ CDOSession session = openSession();
+ CDOBranch subBranch = session.getBranchManager().getBranch(subBranchID);
+ CDOTransaction transaction = session.openTransaction(subBranch);
+ CDOResource resource = transaction.getResource("/test1");
+
+ addCompaniesToList(resource.getContents(), 15, 20);
+ addCompaniesToList(referenceSub, 15, 20);
+
+ try
+ {
+ CDOCommitInfo info = transaction.commit();
+ assertEquals(subBranch, info.getBranch());
+ }
+ catch (CommitException ex)
+ {
+ throw WrappedException.wrap(ex);
+ }
+
+ transaction.close();
+ session.close();
+ }
+
+ // sub-sub branch - third batch (branched off the sub branch)
+ {
+ CDOSession session = openSession();
+ CDOBranch subBranch = session.getBranchManager().getBranch(subBranchID);
+ CDOBranch subsubBranch = subBranch.createBranch("subsubBranch", timestampBaseSubSubBranch);
+ subsubBranchID = subsubBranch.getID();
+
+ CDOTransaction transaction = session.openTransaction(subsubBranch);
+ CDOResource resource = transaction.getResource("/test1");
+
+ addCompaniesToList(resource.getContents(), 15, 20);
+ addCompaniesToList(referenceSubSub, 15, 20);
+
+ try
+ {
+ CDOCommitInfo info = transaction.commit();
+ assertEquals(subsubBranch, info.getBranch());
+ }
+ catch (CommitException ex)
+ {
+ throw WrappedException.wrap(ex);
+ }
+
+ transaction.close();
+ session.close();
+ }
+
+ clearCache(getRepository().getRevisionManager());
+
+ // do manipulations in sub branch
+ {
+ CDOSession session = openSession();
+ CDOBranch subBranch = session.getBranchManager().getBranch(subBranchID);
+ CDOTransaction transaction = session.openTransaction(subBranch);
+ CDOResource resource = transaction.getResource("/test1");
+
+ manipulator.doManipulations(resource.getContents());
+ manipulator.doManipulations(referenceSub);
+
+ try
+ {
+ transaction.commit();
+ }
+ catch (CommitException ex)
+ {
+ throw WrappedException.wrap(ex);
+ }
+
+ transaction.close();
+ session.close();
+ }
+
+ clearCache(getRepository().getRevisionManager());
+
+ // do manipulations in sub-sub branch
+ {
+ CDOSession session = openSession();
+ CDOBranch subsubBranch = session.getBranchManager().getBranch(subsubBranchID);
+ CDOTransaction transaction = session.openTransaction(subsubBranch);
+ CDOResource resource = transaction.getResource("/test1");
+
+ manipulator.doManipulations(resource.getContents());
+ manipulator.doManipulations(referenceSubSub);
+
+ try
+ {
+ transaction.commit();
+ }
+ catch (CommitException ex)
+ {
+ throw WrappedException.wrap(ex);
+ }
+
+ transaction.close();
+ session.close();
+ }
+
+ clearCache(getRepository().getRevisionManager());
+
+ {
+ CDOSession session = openSession();
+ CDOBranch subsubBranch = session.getBranchManager().getBranch(subsubBranchID);
+ CDOView view = session.openView(subsubBranch);
+ CDOResource resource = view.getResource("/test1");
+
+ assertEquals(referenceSubSub.size(), resource.getContents().size());
+
+ for (int i = 0; i < referenceSubSub.size(); i++)
+ {
+ assertEquals(referenceSubSub.get(i).getName(), ((Company)resource.getContents().get(i)).getName());
+ }
+
+ view.close();
+ session.close();
+ }
+
+ clearCache(getRepository().getRevisionManager());
+
+ {
+ CDOSession session = openSession();
+ CDOBranch subBranch = session.getBranchManager().getBranch(subBranchID);
+ CDOView view = session.openView(subBranch);
+ CDOResource resource = view.getResource("/test1");
+
+ assertEquals(referenceSub.size(), resource.getContents().size());
+
+ for (int i = 0; i < referenceSub.size(); i++)
+ {
+ assertEquals(referenceSub.get(i).getName(), ((Company)resource.getContents().get(i)).getName());
+ }
+
+ view.close();
+ session.close();
+ }
+
+ clearCache(getRepository().getRevisionManager());
+
+ {
+ CDOSession session = openSession();
+ CDOView view = session.openView();
+ CDOResource resource = view.getResource("/test1");
+
+ assertEquals(referenceMain.size(), resource.getContents().size());
+
+ for (int i = 0; i < referenceMain.size(); i++)
+ {
+ assertEquals(referenceMain.get(i).getName(), ((Company)resource.getContents().get(i)).getName());
+ }
+
+ view.close();
+ session.close();
+ }
+ }
+
+  @SuppressWarnings("unchecked")
+  protected void addCompaniesToList(@SuppressWarnings("rawtypes") EList list, int from, int to)
+  {
+    // Append one freshly created company per index, named "company <index>".
+    for (int index = from; index < to; index++)
+    {
+      Company company = getModel1Factory().createCompany();
+      company.setName("company " + index);
+      list.add(company);
+    }
+  }
+}
diff --git a/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/RevisionDeltaInBranchTest.java b/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/RevisionDeltaInBranchTest.java new file mode 100644 index 0000000000..3f1ec27867 --- /dev/null +++ b/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/RevisionDeltaInBranchTest.java @@ -0,0 +1,198 @@ +/**
+ * Copyright (c) 2004 - 2010 Eike Stepper (Berlin, Germany) and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Eike Stepper - initial API and implementation
+ */
+package org.eclipse.emf.cdo.tests;
+
+import org.eclipse.emf.cdo.common.branch.CDOBranch;
+import org.eclipse.emf.cdo.common.commit.CDOCommitInfo;
+import org.eclipse.emf.cdo.eresource.CDOResource;
+import org.eclipse.emf.cdo.session.CDOSession;
+import org.eclipse.emf.cdo.tests.model1.Company;
+import org.eclipse.emf.cdo.transaction.CDOTransaction;
+import org.eclipse.emf.cdo.util.CommitException;
+import org.eclipse.emf.cdo.view.CDOView;
+
+import org.eclipse.net4j.util.WrappedException;
+
+import org.eclipse.emf.common.util.BasicEList;
+import org.eclipse.emf.common.util.EList;
+
+/**
+ * @author Eike Stepper
+ */
+public class RevisionDeltaInBranchTest extends RevisionDeltaTest
+{
+  @Override
+  protected void testStoreDelta(ListManipulator manipulator)
+  {
+    BasicEList<Company> referenceSub = new BasicEList<Company>();
+    BasicEList<Company> referenceMain = new BasicEList<Company>();
+
+    long timestampBaseBranch = 0L;
+    int subBranchID = 0;
+
+    // Main branch: create /test1, commit companies 0..9 and remember the commit time as the branch point.
+    {
+      CDOSession session = openSession();
+      CDOBranch mainBranch = session.getBranchManager().getMainBranch();
+
+      CDOTransaction transaction = session.openTransaction();
+      CDOResource resource = transaction.createResource("/test1");
+
+      addCompaniesToList(resource.getContents(), 0, 10);
+      addCompaniesToList(referenceMain, 0, 10);
+      addCompaniesToList(referenceSub, 0, 10);
+
+      try
+      {
+        CDOCommitInfo info = transaction.commit();
+        assertEquals(mainBranch, info.getBranch());
+        timestampBaseBranch = info.getTimeStamp();
+      }
+      catch (CommitException ex)
+      {
+        throw WrappedException.wrap(ex);
+      }
+
+      transaction.close();
+      session.close();
+    }
+
+    clearCache(getRepository().getRevisionManager());
+
+    // Main branch, second batch: commit companies 10..14 AFTER the branch point, so only main sees them.
+    {
+      CDOSession session = openSession();
+      CDOBranch mainBranch = session.getBranchManager().getMainBranch();
+
+      CDOTransaction transaction = session.openTransaction();
+      CDOResource resource = transaction.getResource("/test1");
+
+      addCompaniesToList(resource.getContents(), 10, 15);
+      addCompaniesToList(referenceMain, 10, 15);
+
+      try
+      {
+        CDOCommitInfo info = transaction.commit();
+        assertEquals(mainBranch, info.getBranch());
+      }
+      catch (CommitException ex)
+      {
+        throw WrappedException.wrap(ex);
+      }
+
+      transaction.close();
+      session.close();
+    }
+
+    clearCache(getRepository().getRevisionManager());
+
+    // Sub branch: fork from the remembered branch point and commit companies 10..19 there instead.
+    {
+      CDOSession session = openSession();
+      CDOBranch mainBranch = session.getBranchManager().getMainBranch();
+      CDOBranch subBranch = mainBranch.createBranch("subBranch", timestampBaseBranch);
+      subBranchID = subBranch.getID();
+
+      CDOTransaction transaction = session.openTransaction(subBranch);
+      CDOResource resource = transaction.getResource("/test1");
+
+      addCompaniesToList(resource.getContents(), 10, 20);
+      addCompaniesToList(referenceSub, 10, 20);
+
+      try
+      {
+        CDOCommitInfo info = transaction.commit();
+        assertEquals(subBranch, info.getBranch());
+      }
+      catch (CommitException ex)
+      {
+        throw WrappedException.wrap(ex);
+      }
+
+      transaction.close();
+      session.close();
+    }
+
+    clearCache(getRepository().getRevisionManager());
+
+    // Apply the list manipulations in the sub branch, mirroring them on the reference list.
+    {
+      CDOSession session = openSession();
+      CDOBranch subBranch = session.getBranchManager().getBranch(subBranchID);
+      CDOTransaction transaction = session.openTransaction(subBranch);
+      CDOResource resource = transaction.getResource("/test1");
+
+      manipulator.doManipulations(resource.getContents());
+      manipulator.doManipulations(referenceSub);
+
+      try
+      {
+        transaction.commit();
+      }
+      catch (CommitException ex)
+      {
+        throw WrappedException.wrap(ex);
+      }
+
+      transaction.close();
+      session.close();
+    }
+
+    clearCache(getRepository().getRevisionManager());
+    // Verify: a fresh view on the sub branch must match the manipulated referenceSub list.
+    {
+      CDOSession session = openSession();
+      CDOBranch subBranch = session.getBranchManager().getBranch(subBranchID);
+      CDOView view = session.openView(subBranch);
+      CDOResource resource = view.getResource("/test1");
+
+      assertEquals(referenceSub.size(), resource.getContents().size());
+
+      for (int i = 0; i < referenceSub.size(); i++)
+      {
+        assertEquals(referenceSub.get(i).getName(), ((Company)resource.getContents().get(i)).getName());
+      }
+
+      view.close();
+      session.close();
+    }
+
+    clearCache(getRepository().getRevisionManager());
+    // Verify: the main branch must still match referenceMain, untouched by the sub-branch changes.
+    {
+      CDOSession session = openSession();
+      CDOView view = session.openView();
+      CDOResource resource = view.getResource("/test1");
+
+      assertEquals(referenceMain.size(), resource.getContents().size());
+
+      for (int i = 0; i < referenceMain.size(); i++)
+      {
+        assertEquals(referenceMain.get(i).getName(), ((Company)resource.getContents().get(i)).getName());
+      }
+
+      view.close();
+      session.close();
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  protected void addCompaniesToList(@SuppressWarnings("rawtypes") EList list, int from, int to)
+  {
+    for (int i = from; i < to; i++)
+    {
+      String name = "company " + i;
+      Company company = getModel1Factory().createCompany();
+      company.setName(name);
+      list.add(company);
+    }
+  }
+}
diff --git a/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/RevisionDeltaTest.java b/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/RevisionDeltaTest.java index aa1b2ccb08..e56ee8eca7 100644 --- a/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/RevisionDeltaTest.java +++ b/plugins/org.eclipse.emf.cdo.tests/src/org/eclipse/emf/cdo/tests/RevisionDeltaTest.java @@ -538,7 +538,7 @@ public class RevisionDeltaTest extends AbstractCDOTest return (InternalCDORevision)CDOUtil.getCDOObject((EObject)object).cdoRevision().copy(); } - private void testStoreDelta(ListManipulator manipulator) + protected void testStoreDelta(ListManipulator manipulator) { BasicEList<Company> reference = new BasicEList<Company>(); @@ -617,7 +617,7 @@ public class RevisionDeltaTest extends AbstractCDOTest /** * @author Simon McDuff */ - private static interface ListManipulator + protected static interface ListManipulator { public void doManipulations(EList<?> list); } |