Download
Getting Started
Members
Projects
Community
Marketplace
Events
Planet Eclipse
Newsletter
Videos
Participate
Report a Bug
Forums
Mailing Lists
Wiki
IRC
How to Contribute
Working Groups
Automotive
Internet of Things
LocationTech
Long-Term Support
PolarSys
Science
OpenMDM
More
Community
Marketplace
Events
Planet Eclipse
Newsletter
Videos
Participate
Report a Bug
Forums
Mailing Lists
Wiki
IRC
How to Contribute
Working Groups
Automotive
Internet of Things
LocationTech
Long-Term Support
PolarSys
Science
OpenMDM
Toggle navigation
Bugzilla – Attachment 178847 Details for
Bug 324341
support on delete cascade in DDL and runtime
Home
|
New
|
Browse
|
Search
|
[?]
|
Reports
|
Requests
|
Help
|
Log In
[x]
|
Terms of Use
|
Copyright Agent
[patch]
patch
2010-09-14-cascade-delete.patch (text/plain), 337.40 KB, created by
James Sutherland
on 2010-09-14 11:26:12 EDT
(
hide
)
Description:
patch
Filename:
MIME Type:
Creator:
James Sutherland
Created:
2010-09-14 11:26:12 EDT
Size:
337.40 KB
patch
obsolete
>Index: dbws/eclipselink.dbws.test.oracle/src/dbws/testing/oracleobjecttype/OracleObjecttypeTestSuite.java >=================================================================== >--- dbws/eclipselink.dbws.test.oracle/src/dbws/testing/oracleobjecttype/OracleObjecttypeTestSuite.java (revision 8109) >+++ dbws/eclipselink.dbws.test.oracle/src/dbws/testing/oracleobjecttype/OracleObjecttypeTestSuite.java (working copy) >@@ -444,9 +444,9 @@ > new ObjectPersistenceWorkbenchXMLProject(); > XMLTransformationMapping versionMapping = > (XMLTransformationMapping)runtimeProject.getDescriptor(Project.class). >- getMappings().firstElement(); >+ getMappings().get(0); > TransformerBasedFieldTransformation versionTransformer = > (TransformerBasedFieldTransformation)versionMapping.getFieldTransformations().get(0); > Field transformerField = > TransformerBasedFieldTransformation.class.getDeclaredField("transformer"); > transformerField.setAccessible(true); >Index: dbws/org.eclipse.persistence.dbws/src/org/eclipse/persistence/internal/dbws/ProviderHelper.java >=================================================================== >--- dbws/org.eclipse.persistence.dbws/src/org/eclipse/persistence/internal/dbws/ProviderHelper.java (revision 8109) >+++ dbws/org.eclipse.persistence.dbws/src/org/eclipse/persistence/internal/dbws/ProviderHelper.java (working copy) >@@ -22,6 +22,7 @@ > import java.util.Iterator; > import java.util.Map; > import java.util.Vector; >+import java.util.List; > import org.w3c.dom.Element; > import org.w3c.dom.Node; > import org.w3c.dom.NodeList; >@@ -331,7 +332,7 @@ > } > else { > ClassDescriptor desc = null; >- for (XMLDescriptor xdesc : (Vector<XMLDescriptor>)oxProject.getOrderedDescriptors()) { >+ for (XMLDescriptor xdesc : (List<XMLDescriptor>)(List)oxProject.getOrderedDescriptors()) { > XMLSchemaReference schemaReference = 
xdesc.getSchemaReference(); > if (schemaReference != null && > schemaReference.getSchemaContext().equalsIgnoreCase(key)) { >Index: foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/BasicTest.java >=================================================================== >--- foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/BasicTest.java (revision 8109) >+++ foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/BasicTest.java (working copy) >@@ -83,11 +83,6 @@ > throw new TestErrorException("Failed to make a java.util.Vector from a java.util.Set."); > } > >- aVector.add(null); >- if (!Helper.removeNullElement(aVector)) { >- throw new TestErrorException("Failed to remove the first null element from java.util.Vector"); >- } >- > aVector.clear(); > for (int i = 0; i < 3; i++) { > aVector.add(i, new Integer(i)); >Index: foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareOrderedVectorsOfDifferentSizeTest.java >=================================================================== >--- foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareOrderedVectorsOfDifferentSizeTest.java (revision 8109) >+++ foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareOrderedVectorsOfDifferentSizeTest.java (working copy) >@@ -1,60 +0,0 @@ >-/******************************************************************************* >- * Copyright (c) 1998, 2010 Oracle. All rights reserved. >- * This program and the accompanying materials are made available under the >- * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 >- * which accompanies this distribution. >- * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html >- * and the Eclipse Distribution License is available at >- * http://www.eclipse.org/org/documents/edl-v10.php. 
>- * >- * Contributors: >- * Oracle - initial API and implementation from Oracle TopLink >- ******************************************************************************/ >-package org.eclipse.persistence.testing.tests.helper; >- >-import java.util.Vector; >-import org.eclipse.persistence.internal.helper.*; >-import org.eclipse.persistence.testing.framework.*; >- >-public class CheckCompareOrderedVectorsOfDifferentSizeTest extends AutoVerifyTestCase { >- Exception e; >- Vector v1; >- Vector v2; >- boolean test1ResultIsTrue = false; >- >- public CheckCompareOrderedVectorsOfDifferentSizeTest() { >- setDescription("Test of Helper.compareOrderedVectors(Vector vector1, Vector vector2) when vectors are of different size."); >- } >- >- public void reset() { >- v1 = null; >- v2 = null; >- } >- >- public void setup() { >- v1 = new Vector(); >- v1.addElement(new Integer(1)); >- v1.addElement(new Integer(2)); >- v2 = new Vector(); >- v2.addElement(new Integer(3)); >- } >- >- public void test() { >- try { >- test1ResultIsTrue = Helper.compareOrderedVectors(v1, v2); >- >- } catch (Exception e) { >- this.e = e; >- throw new TestErrorException("An exception should not have been thrown when checking if vectors are of different size."); >- } >- } >- >- public void verify() { >- if (test1ResultIsTrue) { >- throw new TestErrorException("Helper.compareOrderedVectors(v1, v2) does not recognize that Vectors are of different size."); >- } >- if (e != null) { >- throw new TestErrorException("An exception should not have been thrown when checking if vectors are of different size: " + e.toString()); >- } >- } >-} >Index: foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareOrderedVectorsTest.java >=================================================================== >--- foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareOrderedVectorsTest.java (revision 8109) >+++ 
foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareOrderedVectorsTest.java (working copy) >@@ -1,60 +0,0 @@ >-/******************************************************************************* >- * Copyright (c) 1998, 2010 Oracle. All rights reserved. >- * This program and the accompanying materials are made available under the >- * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 >- * which accompanies this distribution. >- * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html >- * and the Eclipse Distribution License is available at >- * http://www.eclipse.org/org/documents/edl-v10.php. >- * >- * Contributors: >- * Oracle - initial API and implementation from Oracle TopLink >- ******************************************************************************/ >-package org.eclipse.persistence.testing.tests.helper; >- >-import java.util.Vector; >-import org.eclipse.persistence.internal.helper.*; >-import org.eclipse.persistence.testing.framework.*; >- >-public class CheckCompareOrderedVectorsTest extends AutoVerifyTestCase { >- Exception e; >- Vector v1; >- Vector v2; >- boolean test1ResultIsTrue = false; >- >- public CheckCompareOrderedVectorsTest() { >- setDescription("Test of Helper.compareOrderedVectors(Vector vector1, Vector vector2) when vectors are identical."); >- } >- >- public void reset() { >- v1 = null; >- v2 = null; >- } >- >- public void setup() { >- v1 = new Vector(); >- v1.insertElementAt(new Integer(1), 0); >- v1.insertElementAt(new Integer(2), 1); >- v1.insertElementAt(new Integer(3), 2); >- v2 = v1; >- } >- >- public void test() { >- try { >- test1ResultIsTrue = Helper.compareOrderedVectors(v1, v2); >- >- } catch (Exception e) { >- this.e = e; >- throw new TestErrorException("An exception should not have been thrown when comparing Vectors - when the Vectors are identical."); >- } >- } >- >- public void verify() { >- if (!test1ResultIsTrue) { >- 
throw new TestErrorException("Helper.compareOrderedVectors(v1, v2) does not recognize that Vectors are identical."); >- } >- if (e != null) { >- throw new TestErrorException("An exception should not have been thrown when comparing Vectors - when the Vectors are identical: " + e.toString()); >- } >- } >-} >Index: foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareOrderedVectorsWithNullElementTest.java >=================================================================== >--- foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareOrderedVectorsWithNullElementTest.java (revision 8109) >+++ foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareOrderedVectorsWithNullElementTest.java (working copy) >@@ -1,63 +0,0 @@ >-/******************************************************************************* >- * Copyright (c) 1998, 2010 Oracle. All rights reserved. >- * This program and the accompanying materials are made available under the >- * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 >- * which accompanies this distribution. >- * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html >- * and the Eclipse Distribution License is available at >- * http://www.eclipse.org/org/documents/edl-v10.php. 
>- * >- * Contributors: >- * Oracle - initial API and implementation from Oracle TopLink >- ******************************************************************************/ >-package org.eclipse.persistence.testing.tests.helper; >- >-import java.util.Vector; >-import org.eclipse.persistence.internal.helper.*; >-import org.eclipse.persistence.testing.framework.*; >- >-public class CheckCompareOrderedVectorsWithNullElementTest extends AutoVerifyTestCase { >- Exception e; >- Vector v1; >- Vector v2; >- boolean test1ResultIsTrue = false; >- >- public CheckCompareOrderedVectorsWithNullElementTest() { >- setDescription("Test of Helper.compareOrderedVectors(Vector vector1, Vector vector2) when first vector has null element."); >- } >- >- public void reset() { >- v1 = null; >- v2 = null; >- } >- >- public void setup() { >- v1 = new Vector(); >- v2 = new Vector(); >- v1.insertElementAt(new Integer(1), 0); >- v1.insertElementAt(null, 1); >- v1.insertElementAt(new Integer(3), 2); >- v2.insertElementAt(new Integer(1), 0); >- v2.insertElementAt(new Integer(2), 1); >- v2.insertElementAt(new Integer(3), 2); >- } >- >- public void test() { >- try { >- test1ResultIsTrue = Helper.compareOrderedVectors(v1, v2); >- >- } catch (Exception e) { >- this.e = e; >- throw new TestErrorException("An exception should not have been thrown when comparing vectors when first vector has null element."); >- } >- } >- >- public void verify() { >- if (test1ResultIsTrue) { >- throw new TestErrorException("Helper.compareOrderedVectors(v1, v2) does not recognize that one of vectors contains null element."); >- } >- if (e != null) { >- throw new TestErrorException("An exception should not have been thrown when comparing vectors when first vector has null element: " + e.toString()); >- } >- } >-} >Index: foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareUnorderedVectorsOfDifferentSizeTest.java >=================================================================== 
>--- foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareUnorderedVectorsOfDifferentSizeTest.java (revision 8109) >+++ foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareUnorderedVectorsOfDifferentSizeTest.java (working copy) >@@ -1,60 +0,0 @@ >-/******************************************************************************* >- * Copyright (c) 1998, 2010 Oracle. All rights reserved. >- * This program and the accompanying materials are made available under the >- * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 >- * which accompanies this distribution. >- * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html >- * and the Eclipse Distribution License is available at >- * http://www.eclipse.org/org/documents/edl-v10.php. >- * >- * Contributors: >- * Oracle - initial API and implementation from Oracle TopLink >- ******************************************************************************/ >-package org.eclipse.persistence.testing.tests.helper; >- >-import java.util.Vector; >-import org.eclipse.persistence.internal.helper.*; >-import org.eclipse.persistence.testing.framework.*; >- >-public class CheckCompareUnorderedVectorsOfDifferentSizeTest extends AutoVerifyTestCase { >- Exception e; >- Vector v1; >- Vector v2; >- boolean test1ResultIsTrue = false; >- >- public CheckCompareUnorderedVectorsOfDifferentSizeTest() { >- setDescription("Test of Helper.compareUnorderedVectors(Vector vector1, Vector vector2) when vectors are of different size"); >- } >- >- public void reset() { >- v1 = null; >- v2 = null; >- } >- >- public void setup() { >- v1 = new Vector(); >- v1.addElement(new Integer(1)); >- v1.addElement(new Integer(2)); >- v2 = new Vector(); >- v2.addElement(new Integer(3)); >- } >- >- public void test() { >- try { >- test1ResultIsTrue = Helper.compareOrderedVectors(v1, v2); >- >- } catch (Exception e) { >- this.e = 
e; >- throw new TestErrorException("An exception should not have been thrown when checking if unordered vectors are of different size."); >- } >- } >- >- public void verify() { >- if (test1ResultIsTrue) { >- throw new TestErrorException("Helper.compareUnorderedVectors(v1, v2) does not recognize that unordered Vectors are of different size."); >- } >- if (e != null) { >- throw new TestErrorException("An exception should not have been thrown when checking if unordered Vectors are of different size: " + e.toString()); >- } >- } >-} >Index: foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareUnorderedVectorsWhenIdenticalTest.java >=================================================================== >--- foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareUnorderedVectorsWhenIdenticalTest.java (revision 8109) >+++ foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/CheckCompareUnorderedVectorsWhenIdenticalTest.java (working copy) >@@ -1,60 +0,0 @@ >-/******************************************************************************* >- * Copyright (c) 1998, 2010 Oracle. All rights reserved. >- * This program and the accompanying materials are made available under the >- * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 >- * which accompanies this distribution. >- * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html >- * and the Eclipse Distribution License is available at >- * http://www.eclipse.org/org/documents/edl-v10.php. 
>- * >- * Contributors: >- * Oracle - initial API and implementation from Oracle TopLink >- ******************************************************************************/ >-package org.eclipse.persistence.testing.tests.helper; >- >-import java.util.Vector; >-import org.eclipse.persistence.internal.helper.*; >-import org.eclipse.persistence.testing.framework.*; >- >-public class CheckCompareUnorderedVectorsWhenIdenticalTest extends AutoVerifyTestCase { >- Exception e; >- Vector v1; >- Vector v2; >- boolean test1ResultIsTrue = false; >- >- public CheckCompareUnorderedVectorsWhenIdenticalTest() { >- setDescription("Test of Helper.compareUnrderedVectors(Vector vector1, Vector vector2) when vectors are identical."); >- } >- >- public void reset() { >- v1 = null; >- v2 = null; >- } >- >- public void setup() { >- v1 = new Vector(); >- v1.insertElementAt(new Integer(1), 0); >- v1.insertElementAt(new Integer(2), 1); >- v1.insertElementAt(new Integer(3), 2); >- v2 = v1; >- } >- >- public void test() { >- try { >- test1ResultIsTrue = Helper.compareUnorderedVectors(v1, v2); >- >- } catch (Exception e) { >- this.e = e; >- throw new TestErrorException("An exception should not have been thrown when comparing unordered vectors."); >- } >- } >- >- public void verify() { >- if (!test1ResultIsTrue) { >- throw new TestErrorException("Helper.compareUnorderedVectors(Vector v1, Vector v2) does not recognize that Vectors are identical."); >- } >- if (e != null) { >- throw new TestErrorException("An exception should not have been thrown when comparing unordered vectors when the Vectors are identical: " + e.toString()); >- } >- } >-} >Index: foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/HelperTestModel.java >=================================================================== >--- foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/HelperTestModel.java (revision 8109) >+++ 
foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/helper/HelperTestModel.java (working copy) >@@ -35,13 +35,8 @@ > suite.addTest(new CompareCharArrayContentTest()); > suite.addTest(new CheckAreVectorTypesAssignableWithNullVectorTest()); > suite.addTest(new CheckAreVectorTypesAssignableTest()); >- suite.addTest(new CheckCompareOrderedVectorsWithNullElementTest()); >- suite.addTest(new CheckCompareOrderedVectorsOfDifferentSizeTest()); >- suite.addTest(new CheckCompareOrderedVectorsTest()); > suite.addTest(new CheckCompareByteArraysWithDifferentElementsTest()); > suite.addTest(new CheckCompareBigDecimalsTest()); >- suite.addTest(new CheckCompareUnorderedVectorsOfDifferentSizeTest()); >- suite.addTest(new CheckCompareUnorderedVectorsWhenIdenticalTest()); > suite.addTest(new CheckClassIsSubclassWithNullSuperclassTest()); > > suite.addTest(new BasicTest()); >Index: foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/validation/ExceptionTestSaveDescriptor.java >=================================================================== >--- foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/validation/ExceptionTestSaveDescriptor.java (revision 8109) >+++ foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/validation/ExceptionTestSaveDescriptor.java (working copy) >@@ -35,7 +35,7 @@ > > protected void setup() { > orgIntegrityChecker = getSession().getIntegrityChecker(); >- orgOrderedDescriptor = (Vector)getSession().getProject().getOrderedDescriptors().clone(); //added >+ orgOrderedDescriptor = new Vector(getSession().getProject().getOrderedDescriptors()); //added > orgDescriptors = (Map)((HashMap)getSession().getProject().getDescriptors()).clone(); > getSession().setIntegrityChecker(new IntegrityChecker()); //moved into setup > getSession().getIntegrityChecker().dontCatchExceptions(); //moved into setup >Index: 
foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/annotations/CascadeOnDelete.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/annotations/CascadeOnDelete.java (revision 0) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/annotations/CascadeOnDelete.java (revision 0) >@@ -0,0 +1,43 @@ >+/******************************************************************************* >+ * Copyright (c) 2010 Oracle. All rights reserved. >+ * This program and the accompanying materials are made available under the >+ * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 >+ * which accompanies this distribution. >+ * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html >+ * and the Eclipse Distribution License is available at >+ * http://www.eclipse.org/org/documents/edl-v10.php. >+ * >+ * Contributors: >+ * Oracle - initial API and implementation >+ ******************************************************************************/ >+package org.eclipse.persistence.annotations; >+ >+import java.lang.annotation.Retention; >+import java.lang.annotation.Target; >+ >+import static java.lang.annotation.ElementType.FIELD; >+import static java.lang.annotation.ElementType.METHOD; >+import static java.lang.annotation.ElementType.TYPE; >+import static java.lang.annotation.RetentionPolicy.RUNTIME; >+ >+/** >+ * Define the foreign key defined by the relationship to cascade the delete on the database. >+ * This means when the source object is deleted the target object will be automatically deleted by the database. >+ * This will affect DDL generation as well as runtime behavior in omitting the delete statements. >+ * <p> >+ * The constraint cascaded depends on the mapping, only relationship mappings are allowed. >+ * The relationship should also use cascade remove, or deleteOrphans. 
>+ * <p>For a OneToOne it can only be defined if the mapping uses a mappedBy, and will delete the target object. >+ * <p>It cannot be defined for a ManyToOne. >+ * <p>For a OneToMany it will delete the target objects, or ONLY the join table if using a join table. >+ * <p>For a ManyToMany it will delete the rows from the join table, not the target objects. >+ * <p>For an ElementCollection it will delete the target rows. >+ * <p>For an Entity it will delete the secondary or JOINED inheritance tables. >+ * >+ * @author James Sutherland >+ * @since EclipseLink 2.2 >+ */ >+@Target({METHOD, FIELD, TYPE}) >+@Retention(RUNTIME) >+public @interface CascadeOnDelete { >+} >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/descriptors/ClassDescriptor.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/descriptors/ClassDescriptor.java (revision 8110) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/descriptors/ClassDescriptor.java (working copy) >@@ -72,6 +72,9 @@ > protected transient Map<DatabaseTable, Map<DatabaseField, DatabaseField>> additionalTablePrimaryKeyFields; > protected transient List<DatabaseTable> multipleTableInsertOrder; > protected transient Map<DatabaseTable, Set<DatabaseTable>> multipleTableForeignKeys; >+ /** Support delete cascading on the database for multiple and inheritance tables. */ >+ protected boolean isCascadeOnDeleteSetOnDatabaseOnSecondaryTables; >+ > protected transient Vector<DatabaseField> fields; > protected transient Vector<DatabaseField> allFields; > protected Vector<DatabaseMapping> mappings; >@@ -2495,8 +2498,7 @@ > * Checks if the class has any private owned parts or other dependencies, (i.e. M:M join table). 
> */ > public boolean hasDependencyOnParts() { >- for (Enumeration mappings = getMappings().elements(); mappings.hasMoreElements();) { >- DatabaseMapping mapping = (DatabaseMapping)mappings.nextElement(); >+ for (DatabaseMapping mapping : getMappings()) { > if (mapping.hasDependency()) { > return true; > } >@@ -4163,8 +4165,27 @@ > this.multipleTableInsertOrder = newValue; > } > >+ /** >+ * ADVANCED: >+ * Return if delete cascading has been set on the database for the descriptor's >+ * multiple tables. >+ */ >+ public boolean isCascadeOnDeleteSetOnDatabaseOnSecondaryTables() { >+ return isCascadeOnDeleteSetOnDatabaseOnSecondaryTables; >+ } > > /** >+ * ADVANCED: >+ * Set if delete cascading has been set on the database for the descriptor's >+ * multiple tables. >+ * This will avoid the delete SQL being generated for those tables. >+ */ >+ public void setIsCascadeOnDeleteSetOnDatabaseOnSecondaryTables(boolean isCascadeOnDeleteSetOnDatabaseOnSecondaryTables) { >+ this.isCascadeOnDeleteSetOnDatabaseOnSecondaryTables = isCascadeOnDeleteSetOnDatabaseOnSecondaryTables; >+ } >+ >+ >+ /** > * INTERNAL: > * Set the ObjectBuilder. 
> */ >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/eis/mappings/EISOneToManyMapping.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/eis/mappings/EISOneToManyMapping.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/eis/mappings/EISOneToManyMapping.java (working copy) >@@ -19,7 +19,6 @@ > import java.util.Map; > import java.util.Vector; > >-import org.eclipse.persistence.descriptors.ClassDescriptor; > import org.eclipse.persistence.eis.EISDescriptor; > import org.eclipse.persistence.eis.EISException; > import org.eclipse.persistence.exceptions.DatabaseException; >@@ -103,20 +102,21 @@ > /** > * INTERNAL: > */ >+ @Override > public boolean isEISMapping() { > return true; > } > > /** >- * PUBLIC: >- * Define the source foreign key relationship in the one-to-many mapping. >- * This method is used for composite source foreign key relationships. >- * That is, the source object's table has multiple foreign key fields >- * that are references to >- * the target object's (typically primary) key fields. >- * Both the source foreign key field name and the corresponding >- * target primary key field name must be specified. >- */ >+ * PUBLIC: >+ * Define the source foreign key relationship in the one-to-many mapping. >+ * This method is used for composite source foreign key relationships. >+ * That is, the source object's table has multiple foreign key fields >+ * that are references to >+ * the target object's (typically primary) key fields. >+ * Both the source foreign key field name and the corresponding >+ * target primary key field name must be specified. 
>+ */ > public void addForeignKeyField(DatabaseField sourceForeignKeyField, DatabaseField targetKeyField) { > this.getSourceForeignKeyFields().add(sourceForeignKeyField); > this.getTargetForeignKeyFields().add(targetKeyField); >@@ -267,6 +267,7 @@ > * Return whether the mapping has any inverse constraint dependencies, > * such as foreign keys. > */ >+ @Override > public boolean hasInverseConstraintDependency() { > return true; > } >@@ -275,6 +276,7 @@ > * INTERNAL: > * Initialize the mapping. > */ >+ @Override > public void initialize(AbstractSession session) throws DescriptorException { > super.initialize(session); > >@@ -318,7 +320,7 @@ > * Initialize the delete all query. > * This query is used to delete the collection of objects from the > * database. >- **/ >+ */ > protected void initializeDeleteAllQuery() { > ((DeleteAllQuery)this.getDeleteAllQuery()).setReferenceClass(this.getReferenceClass()); > if (!this.hasCustomDeleteAllQuery()) { >@@ -328,19 +330,11 @@ > } > > /** >- * Return whether the reference objects must be deleted >- * one by one, as opposed to with a single DELETE statement. >- */ >- protected boolean mustDeleteReferenceObjectsOneByOne() { >- ClassDescriptor referenceDescriptor = this.getReferenceDescriptor(); >- return referenceDescriptor.hasDependencyOnParts() || referenceDescriptor.usesOptimisticLocking() || (referenceDescriptor.hasInheritance() && referenceDescriptor.getInheritancePolicy().shouldReadSubclasses()) || referenceDescriptor.hasMultipleTables(); >- } >- >- /** > * Return whether any process leading to object modification > * should also affect its parts. > * Used by write, insert, update, and delete. > */ >+ @Override > protected boolean shouldObjectModifyCascadeToParts(ObjectLevelModifyQuery query) { > if (isForeignKeyRelationship()) { > return super.shouldObjectModifyCascadeToParts(query); >@@ -361,6 +355,7 @@ > * INTERNAL: > * Used to verify whether the specified object is deleted or not. 
> */ >+ @Override > public boolean verifyDelete(Object object, AbstractSession session) throws DatabaseException { > if (this.isPrivateOwned()) { > Object objects = this.getRealCollectionAttributeValueFromObject(object, session); >@@ -380,6 +375,7 @@ > * INTERNAL: > * Insert the reference objects. > */ >+ @Override > public void postInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { > if (isForeignKeyRelationship()) { > return; >@@ -426,6 +422,7 @@ > * INTERNAL: > * Update the reference objects. > */ >+ @Override > public void postUpdate(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { > if (isForeignKeyRelationship()) { > return; >@@ -488,6 +485,7 @@ > * INTERNAL: > * Delete the reference objects. > */ >+ @Override > public void preDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { > if (isForeignKeyRelationship()) { > return; >@@ -525,6 +523,7 @@ > * INTERNAL: > * Insert privately owned parts > */ >+ @Override > public void preInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { > if (!this.isForeignKeyRelationship()) { > return; >@@ -577,6 +576,7 @@ > * INTERNAL: > * Update the privately owned parts. > */ >+ @Override > public void preUpdate(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { > if (!this.isForeignKeyRelationship()) { > return; >@@ -673,6 +673,7 @@ > * INTERNAL: > * Clone the appropriate attributes. > */ >+ @Override > public Object clone() { > EISOneToManyMapping clone = (EISOneToManyMapping)super.clone(); > clone.setSourceForeignKeysToTargetKeys((Map)((HashMap)getSourceForeignKeysToTargetKeys()).clone()); >@@ -682,6 +683,7 @@ > /** > * Return all the fields mapped by the mapping. 
> */ >+ @Override > protected Vector collectFields() { > if (isForeignKeyRelationship()) { > if (this.getForeignKeyGroupingElement() != null) { >@@ -746,6 +748,7 @@ > * made using identity and, when appropriate, the value of the element's key > * for the Map container. > */ >+ @Override > public ChangeRecord compareForChange(Object clone, Object backup, ObjectChangeSet owner, AbstractSession session) { > if (isForeignKeyRelationship()) { > if ((this.getAttributeValueFromObject(clone) != null) && (!this.isAttributeValueInstantiatedOrChanged(clone))) { >@@ -761,6 +764,7 @@ > * INTERNAL: > * Compare the attributes belonging to this mapping for the objects. > */ >+ @Override > public boolean compareObjects(Object object1, Object object2, AbstractSession session) { > if (isForeignKeyRelationship()) { > return (new EISOneToManyMappingHelper(this)).compareObjects(object1, object2, session); >@@ -769,19 +773,21 @@ > } > > /** >- * ADVANCED: >- * This method is used to have an object add to a collection once the changeSet is applied >- * The referenceKey parameter should only be used for direct Maps. >- */ >+ * ADVANCED: >+ * This method is used to have an object add to a collection once the changeSet is applied >+ * The referenceKey parameter should only be used for direct Maps. >+ */ >+ @Override > public void simpleAddToCollectionChangeRecord(Object referenceKey, Object changeSetToAdd, ObjectChangeSet changeSet, AbstractSession session) { > (new EISOneToManyMappingHelper(this)).simpleAddToCollectionChangeRecord(referenceKey, changeSetToAdd, changeSet, session); > } > > /** >- * ADVANCED: >- * This method is used to have an object removed from a collection once the changeSet is applied >- * The referenceKey parameter should only be used for direct Maps. >- */ >+ * ADVANCED: >+ * This method is used to have an object removed from a collection once the changeSet is applied >+ * The referenceKey parameter should only be used for direct Maps. 
>+ */ >+ @Override > public void simpleRemoveFromCollectionChangeRecord(Object referenceKey, Object changeSetToRemove, ObjectChangeSet changeSet, AbstractSession session) { > (new EISOneToManyMappingHelper(this)).simpleRemoveFromCollectionChangeRecord(referenceKey, changeSetToRemove, changeSet, session); > } >@@ -802,7 +808,7 @@ > } > > /** >- * This method will make sure that all the records privately owned by this mapping are >+ * This method will make sure that all the records privately owned by this mapping are > * actually removed. If such records are found then those are all read and removed one > * by one along with their privately owned parts. > */ >@@ -843,6 +849,7 @@ > * Return the value of the reference attribute or a value holder. > * Check whether the mapping's attribute should be optimized through batch and joining. > */ >+ @Override > public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, AbstractSession executionSession) throws DatabaseException { > if (((EISDescriptor) this.getDescriptor()).getDataFormat() == EISDescriptor.XML) { > ((XMLRecord) row).setSession(executionSession); >@@ -996,6 +1003,7 @@ > * This row is built for shallow insert which happens in case of bidirectional inserts. > * The foreign keys must be set to null to avoid constraints. > */ >+ @Override > public void writeFromObjectIntoRowForShallowInsert(Object object, AbstractRecord row, AbstractSession session) { > if (isForeignKeyRelationship() && !isReadOnly()) { > if (getForeignKeyGroupingElement() != null) { >@@ -1013,6 +1021,7 @@ > * This row is built for shallow insert which happens in case of bidirectional inserts. > * The foreign keys must be set to null to avoid constraints. 
> */ >+ @Override > public void writeFromObjectIntoRowForShallowInsertWithChangeRecord(ChangeRecord changeRecord, AbstractRecord row, AbstractSession session) { > if (isForeignKeyRelationship() && !isReadOnly()) { > if (getForeignKeyGroupingElement() != null) { >@@ -1030,6 +1039,7 @@ > * If any of the references objects has changed, write out > * all the keys. > */ >+ @Override > public void writeFromObjectIntoRowForUpdate(WriteObjectQuery writeQuery, AbstractRecord row) throws DescriptorException { > if (!this.isAttributeValueInstantiatedOrChanged(writeQuery.getObject())) { > return; >@@ -1070,6 +1080,7 @@ > * INTERNAL: > * Write fields needed for insert into the template for with null values. > */ >+ @Override > public void writeInsertFieldsIntoRow(AbstractRecord row, AbstractSession session) { > if (isForeignKeyRelationship() && !isReadOnly()) { > if (getForeignKeyGroupingElement() != null) { >@@ -1086,14 +1097,7 @@ > * INTERNAL: > * This method is not supported in an EIS environment. > */ >- protected Object executeBatchQueryForPessimisticLocking(DatabaseQuery query, UnitOfWorkImpl unitOfWork, AbstractRecord argumentRow) { >- throw DescriptorException.invalidMappingOperation(this, "executeBatchQueryForPessimisticLocking"); >- } >- >- /** >- * INTERNAL: >- * This method is not supported in an EIS environment. >- */ >+ @Override > public void setSelectionSQLString(String sqlString) { > throw DescriptorException.invalidMappingOperation(this, "setSelectionSQLString"); > } >@@ -1102,6 +1106,7 @@ > * INTERNAL: > * This method is not supported in an EIS environment. > */ >+ @Override > public void setSelectionCriteria(Expression anExpression) { > throw DescriptorException.invalidMappingOperation(this, "setSelectionCriteria"); > } >@@ -1110,6 +1115,7 @@ > * INTERNAL: > * This method is not supported in an EIS environment. 
> */ >+ @Override > public void setUsesBatchReading(boolean usesBatchReading) { > throw DescriptorException.invalidMappingOperation(this, "setUsesBatchReading"); > } >@@ -1118,6 +1124,7 @@ > * INTERNAL: > * This method is not supported in an EIS environment. > */ >+ @Override > public boolean shouldUseBatchReading() { > throw DescriptorException.invalidMappingOperation(this, "shouldUseBatchReading"); > } >@@ -1126,6 +1133,7 @@ > * INTERNAL: > * This method is not supported in an EIS environment. > */ >+ @Override > public void useBatchReading() { > throw DescriptorException.invalidMappingOperation(this, "useBatchReading"); > } >@@ -1134,6 +1142,7 @@ > * INTERNAL: > * This method is not supported in an EIS environment. > */ >+ @Override > public void dontUseBatchReading() { > throw DescriptorException.invalidMappingOperation(this, "dontUseBatchReading"); > } >@@ -1142,6 +1151,7 @@ > * INTERNAL: > * This method is not supported in an EIS environment. > */ >+ @Override > public void addAscendingOrdering(String queryKeyName) { > throw DescriptorException.invalidMappingOperation(this, "addAscendingOrdering"); > } >@@ -1150,6 +1160,7 @@ > * INTERNAL: > * This method is not supported in an EIS environment. > */ >+ @Override > public void addDescendingOrdering(String queryKeyName) { > throw DescriptorException.invalidMappingOperation(this, "addDescendingOrdering"); > } >@@ -1158,6 +1169,7 @@ > * INTERNAL: > * This method is not supported in an EIS environment. 
> */ >+ @Override > public void setDeleteAllSQLString(String sqlString) { > throw DescriptorException.invalidMappingOperation(this, "setDeleteAllSQLString"); > } >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/helper/Helper.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/helper/Helper.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/helper/Helper.java (working copy) >@@ -190,6 +190,20 @@ > return objects; > } > >+ public static List addAllUniqueToList(List objects, List objectsToAdd) { >+ if (objectsToAdd == null) { >+ return objects; >+ } >+ int size = objectsToAdd.size(); >+ for (int index = 0; index < size; index++) { >+ Object element = objectsToAdd.get(index); >+ if (!objects.contains(element)) { >+ objects.add(element); >+ } >+ } >+ return objects; >+ } >+ > /** > * Convert the specified vector into an array. > */ >@@ -228,21 +242,6 @@ > } > > /** >- * Convert the passed Vector to a Hashtable >- * Return the Hashtable >- */ >- public static Hashtable buildHashtableFromVector(Vector theVector) { >- Hashtable toReturn = new Hashtable(theVector.size()); >- >- Iterator iter = theVector.iterator(); >- while (iter.hasNext()) { >- Object next = iter.next(); >- toReturn.put(next, next); >- } >- return toReturn; >- } >- >- /** > * Convert the byte array to a HEX string. > * HEX allows for binary data to be printed. 
> */ >@@ -275,21 +274,6 @@ > } > > /** >- * Create a new Vector containing all of the hashtable elements >- * >- */ >- public static Vector buildVectorFromHashtableElements(Hashtable hashtable) { >- Vector vector = new Vector(hashtable.size()); >- Enumeration enumeration = hashtable.elements(); >- >- while (enumeration.hasMoreElements()) { >- vector.addElement(enumeration.nextElement()); >- } >- >- return vector; >- } >- >- /** > * Create a new Vector containing all of the map elements. > */ > public static Vector buildVectorFromMapElements(Map map) { >@@ -304,20 +288,6 @@ > } > > /** >- * Create a new Vector containing all of the hashtable elements. >- */ >- public static Vector buildVectorFromHashtableElements(Map hashtable) { >- Vector vector = new Vector(hashtable.size()); >- Iterator enumeration = hashtable.values().iterator(); >- >- while (enumeration.hasNext()) { >- vector.addElement(enumeration.next()); >- } >- >- return vector; >- } >- >- /** > * Answer a Calendar from a date. > */ > public static Calendar calendarFromUtilDate(java.util.Date date) { >@@ -621,34 +591,6 @@ > } > > /** >- * Compare the elements in two <code>Vector</code>s to see if they are equal. >- * The order of the elements is significant. >- * @return whether the two vectors are equal >- */ >- public static boolean compareOrderedVectors(Vector vector1, Vector vector2) { >- if (vector1 == vector2) { >- return true; >- } >- if (vector1.size() != vector2.size()) { >- return false; >- } >- for (int index = 0; index < vector1.size(); index++) { >- Object element1 = vector1.elementAt(index); >- Object element2 = vector2.elementAt(index); >- if (element1 == null) {// avoid null pointer exception >- if (element2 != null) { >- return false; >- } >- } else { >- if (!element1.equals(element2)) { >- return false; >- } >- } >- } >- return true; >- } >- >- /** > * Compare two potential arrays and return true if they are the same. Will > * check for BigDecimals as well. 
> */ >@@ -670,44 +612,8 @@ > > return false; > } >- >- /** >- * Compare the elements in two <code>Vector</code>s to see if they are equal. >- * The order of the elements is ignored. >- * @param v1 a vector >- * @param v2 a vector >- * @return whether the two vectors contain the same elements >- */ >- public static boolean compareUnorderedVectors(Vector v1, Vector v2) { >- if (v1 == v2) { >- return true; >- } >- if (v1.size() != v2.size()) { >- return false; >- } > >- // One of the Vectors must be cloned so we don't miscompare >- // vectors with the same elements but in different quantities. >- // e.g. [fred, sam, sam] != [fred, sam, fred] >- Vector v3 = (Vector)v2.clone(); >- for (int i = 0; i < v1.size(); i++) { >- Object e1 = v1.elementAt(i); >- if (e1 == null) {// avoid null pointer exception >- // Helper.removeNullElement() will return false if the element was not present to begin with >- if (!removeNullElement(v3)) { >- return false; >- } >- } else { >- // Vector.removeElement() will return false if the element was not present to begin with >- if (!v3.removeElement(e1)) { >- return false; >- } >- } >- } >- return true; >- } > >- > /** > * Merge the two Maps into a new HashMap. > */ >@@ -1371,22 +1277,6 @@ > } > > /** >- * Remove the first <code>null</code> element found in the specified <code>Vector</code>. >- * Return <code>true</code> if a <code>null</code> element was found and removed. >- * Return <code>false</code> if a <code>null</code> element was not found. >- * This is needed in jdk1.1, where <code>Vector.removeElement(Object)</code> >- * for a <code>null</code> element will result in a <code>NullPointerException</code>.... 
>- */ >- public static boolean removeNullElement(Vector v) { >- int indexOfNull = indexOfNullElement(v, 0); >- if (indexOfNull != -1) { >- v.removeElementAt(indexOfNull); >- return true; >- } >- return false; >- } >- >- /** > * Returns a String which has had enough of the specified character removed to be equal to > * the maximumStringLength. > */ >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/BasicIndirectionPolicy.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/BasicIndirectionPolicy.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/BasicIndirectionPolicy.java (working copy) >@@ -305,6 +305,18 @@ > public boolean objectIsInstantiated(Object object) { > return ((ValueHolderInterface)object).isInstantiated(); > } >+ >+ /** >+ * INTERNAL: >+ * Return whether the specified object can be instantiated without database access. 
>+ */ >+ public boolean objectIsEasilyInstantiated(Object object) { >+ if (object instanceof DatabaseValueHolder) { >+ return ((DatabaseValueHolder)object).isEasilyInstantiated(); >+ } else { >+ return true; >+ } >+ } > > /** > * INTERNAL: >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/ContainerIndirectionPolicy.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/ContainerIndirectionPolicy.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/ContainerIndirectionPolicy.java (working copy) >@@ -282,6 +282,19 @@ > public boolean objectIsInstantiated(Object object) { > return ((IndirectContainer)object).getValueHolder().isInstantiated(); > } >+ >+ /** >+ * INTERNAL: >+ * Return whether the specified object can be instantiated without database access. >+ */ >+ public boolean objectIsEasilyInstantiated(Object object) { >+ ValueHolderInterface valueHolder = ((IndirectContainer)object).getValueHolder(); >+ if (valueHolder instanceof DatabaseValueHolder) { >+ return ((DatabaseValueHolder)valueHolder).isEasilyInstantiated(); >+ } else { >+ return true; >+ } >+ } > > /** > * Sets the Container class which implements IndirectContainer >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/IndirectionPolicy.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/IndirectionPolicy.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/IndirectionPolicy.java (working copy) >@@ -294,6 +294,12 @@ > > /** > * INTERNAL: >+ * Return whether the specified object can be instantiated without database access. 
>+ */ >+ public abstract boolean objectIsEasilyInstantiated(Object object); >+ >+ /** >+ * INTERNAL: > * Return whether the specified object is instantiated, or if it has changes. > */ > public boolean objectIsInstantiatedOrChanged(Object object) { >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/NoIndirectionPolicy.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/NoIndirectionPolicy.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/NoIndirectionPolicy.java (working copy) >@@ -166,6 +166,14 @@ > public boolean objectIsInstantiated(Object object) { > return true; > } >+ >+ /** >+ * INTERNAL: >+ * Return whether the specified object can be instantiated without database access. >+ */ >+ public boolean objectIsEasilyInstantiated(Object object) { >+ return true; >+ } > > /** > * INTERNAL: >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/ProxyIndirectionPolicy.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/ProxyIndirectionPolicy.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/ProxyIndirectionPolicy.java (working copy) >@@ -176,6 +176,21 @@ > return true; > } > } >+ >+ /** >+ * INTERNAL: >+ * Return whether the specified object can be instantiated without database access. 
>+ */ >+ public boolean objectIsEasilyInstantiated(Object object) { >+ if (object instanceof Proxy) { >+ ProxyIndirectionHandler handler = (ProxyIndirectionHandler)Proxy.getInvocationHandler(object); >+ ValueHolderInterface valueHolder = handler.getValueHolder(); >+ if (valueHolder instanceof DatabaseValueHolder) { >+ return ((DatabaseValueHolder)valueHolder).isEasilyInstantiated(); >+ } >+ } >+ return true; >+ } > > /** > * INTERNAL: >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/TransparentIndirectionPolicy.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/TransparentIndirectionPolicy.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/indirection/TransparentIndirectionPolicy.java (working copy) >@@ -446,7 +446,21 @@ > } else { > return true;// it must be a "real" collection > } >- } >+ } >+ >+ /** >+ * INTERNAL: >+ * Return whether the specified object can be instantiated without database access. 
>+ */ >+ public boolean objectIsEasilyInstantiated(Object object) { >+ if (object instanceof IndirectContainer) { >+ ValueHolderInterface valueHolder = ((IndirectContainer)object).getValueHolder(); >+ if (valueHolder instanceof DatabaseValueHolder) { >+ return ((DatabaseValueHolder)valueHolder).isEasilyInstantiated(); >+ } >+ } >+ return true; >+ } > > /** > * INTERNAL: >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/localization/i18n/TraceLocalizationResource.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/localization/i18n/TraceLocalizationResource.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/localization/i18n/TraceLocalizationResource.java (working copy) >@@ -33,6 +33,8 @@ > { "stack_of_visited_objects_that_refer_to_the_corrupt_object", "stack of visited objects that refer to the corrupt object: {0}" }, > { "corrupt_object_referenced_through_mapping", "corrupt object referenced through mapping: {0}" }, > { "corrupt_object", "corrupt object: {0}" }, >+ { "begin_unit_of_work_flush", "begin unit of work flush" }, >+ { "end_unit_of_work_flush", "end unit of work flush" }, > { "begin_unit_of_work_commit", "begin unit of work commit" }, > { "end_unit_of_work_commit", "end unit of work commit" }, > { "resume_unit_of_work", "resume unit of work" }, >@@ -48,8 +50,8 @@ > { "register_new", "Register the new object {0}" }, > { "register_new_bean", "Register the new bean {0}" }, > { "register", "Register the object {0}" }, >- { "register_new_for_persist", "PERSIST operation called on: {0}." }, >- { "deleting_object", "The remove operation has been performed on: {0}"}, >+ { "register_new_for_persist", "persist() operation called on: {0}." 
}, >+ { "deleting_object", "remove() operation called on: {0}"}, > { "revert", "Revert the object''s attributes {0}" }, > { "unregister", "Unregister the object {0}" }, > { "begin_batch_statements", "Begin batch statements" }, >@@ -224,7 +226,7 @@ > { "acquire_client_session_broker", "acquire client session broker" }, > { "releasing_client_session_broker", "releasing client session broker" }, > { "client_released", "client released" }, >- { "client_acquired", "client acquired" }, >+ { "client_acquired", "client acquired: {0}" }, > { "tracking_pl_object", "track pessimistic locked object {0} with UnitOfWork {1}" }, > { "instantiate_pl_relationship", "instantiate pessimistic locking relationship when relationship is accessed in a new transaction." }, > { "descriptor_xml_not_in_jar", "The descriptor file ({0}) is not found in jar({1}) file, no migration therefore will be performed for this jar." }, >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/queries/ExpressionQueryMechanism.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/queries/ExpressionQueryMechanism.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/queries/ExpressionQueryMechanism.java (working copy) >@@ -22,6 +22,7 @@ > import org.eclipse.persistence.logging.SessionLog; > import org.eclipse.persistence.mappings.DatabaseMapping; > import org.eclipse.persistence.mappings.DirectCollectionMapping; >+import org.eclipse.persistence.mappings.ForeignReferenceMapping; > import org.eclipse.persistence.mappings.ManyToManyMapping; > import org.eclipse.persistence.exceptions.*; > import org.eclipse.persistence.mappings.OneToOneMapping; >@@ -334,19 +335,20 @@ > */ > protected Vector buildDeleteAllStatementsForMappingsWithTempTable(ClassDescriptor descriptor, DatabaseTable rootTable, Collection rootTablePrimaryKeyFields, boolean 
dontCheckDescriptor) { > Vector deleteStatements = new Vector(); >- Iterator itMappings = descriptor.getMappings().iterator(); >- while(itMappings.hasNext()) { >- DatabaseMapping mapping = (DatabaseMapping)itMappings.next(); >- if(mapping.isManyToManyMapping() || mapping.isDirectCollectionMapping()) { >- if(dontCheckDescriptor || mapping.getDescriptor().equals(descriptor)) { >+ for (DatabaseMapping mapping : descriptor.getMappings()) { >+ // TODO: Also need to delete 1-1 using join table, maybe element collection. >+ if (mapping.isManyToManyMapping() || mapping.isDirectCollectionMapping()) { >+ if ((dontCheckDescriptor >+ || mapping.getDescriptor().equals(descriptor)) >+ && !((ForeignReferenceMapping)mapping).isCascadeOnDeleteSetOnDatabase()) { > Vector targetFields = null; >- if(mapping.isManyToManyMapping()) { >+ if (mapping.isManyToManyMapping()) { > targetFields = ((ManyToManyMapping)mapping).getSourceRelationKeyFields(); >- } else if(mapping.isDirectCollectionMapping()) { >+ } else if (mapping.isDirectCollectionMapping()) { > targetFields = ((DirectCollectionMapping)mapping).getReferenceKeyFields(); > } > >- DatabaseTable targetTable = ((DatabaseField)targetFields.firstElement()).getTable(); >+ DatabaseTable targetTable = ((DatabaseField)targetFields.get(0)).getTable(); > SQLDeleteAllStatementForTempTable deleteStatement > = buildDeleteAllStatementForTempTable(rootTable, rootTablePrimaryKeyFields, targetTable, targetFields); > deleteStatements.addElement(deleteStatement); >@@ -1040,49 +1042,49 @@ > * NOTE: A similar pattern also used in method buildDeleteAllStatementsForTempTable(): > * if you are updating this method consider applying a similar update to that method as well. 
> */ >- protected void prepareDeleteAll(Vector tablesToIgnore) { >+ protected void prepareDeleteAll(List<DatabaseTable> tablesToIgnore) { > List<DatabaseTable> tablesInInsertOrder; >+ ClassDescriptor descriptor = getDescriptor(); > if (tablesToIgnore == null) { > // It's original (not a nested) method call. >- tablesInInsertOrder = getDescriptor().getMultipleTableInsertOrder(); >+ tablesInInsertOrder = descriptor.getMultipleTableInsertOrder(); > } else { > // It's a nested method call: tableInInsertOrder filled with descriptor's tables (in insert order), > // the tables found in tablesToIgnore are thrown away - > // they have already been taken care of by the caller. > // In Employee example, query with reference class Project gets here > // to handle LPROJECT table; tablesToIgnore contains PROJECT table. >- tablesInInsertOrder = new Vector(getDescriptor().getMultipleTableInsertOrder().size()); >- for (Iterator tablesEnum = getDescriptor().getMultipleTableInsertOrder().iterator(); >- tablesEnum.hasNext();) { >- DatabaseTable table = (DatabaseTable)tablesEnum.next(); >- if(!tablesToIgnore.contains(table)) { >+ tablesInInsertOrder = new ArrayList(descriptor.getMultipleTableInsertOrder().size()); >+ for (DatabaseTable table : descriptor.getMultipleTableInsertOrder()) { >+ if (!tablesToIgnore.contains(table)) { > tablesInInsertOrder.add(table); > } > } > } > > // cache the flag - used many times >- boolean hasInheritance = getDescriptor().hasInheritance(); >+ boolean hasInheritance = descriptor.hasInheritance(); > > if (!tablesInInsertOrder.isEmpty()) { > Expression whereClause = getSelectionCriteria(); > > SQLCall selectCallForExist = null; > >- boolean isSelectCallForNotExistRequired = tablesToIgnore == null && tablesInInsertOrder.size() > 1; >+ boolean isSelectCallForNotExistRequired = (tablesToIgnore == null) >+ && (tablesInInsertOrder.size() > 1) && (!descriptor.isCascadeOnDeleteSetOnDatabaseOnSecondaryTables()); > > SQLSelectStatement selectStatementForNotExist = 
null; > SQLCall selectCallForNotExist = null; > > // inheritanceExpression is always null in a nested method call. > Expression inheritanceExpression = null; >- if(tablesToIgnore == null) { >+ if (tablesToIgnore == null) { > // It's original (not a nested) method call. >- if(hasInheritance) { >- if(getDescriptor().getInheritancePolicy().shouldReadSubclasses()) { >- inheritanceExpression = getDescriptor().getInheritancePolicy().getWithAllSubclassesExpression(); >+ if (hasInheritance) { >+ if (descriptor.getInheritancePolicy().shouldReadSubclasses()) { >+ inheritanceExpression = descriptor.getInheritancePolicy().getWithAllSubclassesExpression(); > } else { >- inheritanceExpression = getDescriptor().getInheritancePolicy().getOnlyInstancesExpression(); >+ inheritanceExpression = descriptor.getInheritancePolicy().getOnlyInstancesExpression(); > } > } > } >@@ -1091,9 +1093,9 @@ > > // Main Case: Descriptor is mapped to more than one table and/or the query references other tables > boolean isMainCase = selectStatementForExist.requiresAliases(); >- if(isMainCase) { >- if(whereClause != null) { >- if(getSession().getPlatform().shouldAlwaysUseTempStorageForModifyAll() && tablesToIgnore == null) { >+ if (isMainCase) { >+ if (whereClause != null) { >+ if (getSession().getPlatform().shouldAlwaysUseTempStorageForModifyAll() && tablesToIgnore == null) { > // currently DeleteAll using Oracle anonymous block is not implemented > if(!getSession().getPlatform().isOracle()) { > prepareDeleteAllUsingTempStorage(); >@@ -1101,18 +1103,18 @@ > } > } > >- if(isSelectCallForNotExistRequired) { >+ if (isSelectCallForNotExistRequired) { > selectStatementForNotExist = createSQLSelectStatementForModifyAll(null, null); > selectCallForNotExist = (SQLCall)selectStatementForNotExist.buildCall(getSession()); > } > } else { > //whereClause = null >- if(getSession().getPlatform().shouldAlwaysUseTempStorageForModifyAll() && tablesToIgnore == null) { >+ if 
(getSession().getPlatform().shouldAlwaysUseTempStorageForModifyAll() && tablesToIgnore == null) { > // currently DeleteAll using Oracle anonymous block is not implemented >- if(!getSession().getPlatform().isOracle()) { >+ if (!getSession().getPlatform().isOracle()) { > // the only case to handle without temp storage is inheritance root without inheritanceExpression: > // in this case all generated delete calls will have no where clauses. >- if(hasInheritance && !(inheritanceExpression == null && getDescriptor().getInheritancePolicy().isRootParentDescriptor())) { >+ if (hasInheritance && !(inheritanceExpression == null && descriptor.getInheritancePolicy().isRootParentDescriptor())) { > prepareDeleteAllUsingTempStorage(); > return; > } >@@ -1121,12 +1123,12 @@ > } > } else { > // simple case: Descriptor is mapped to a single table and the query references no other tables. >- if(whereClause != null) { >- if(getSession().getPlatform().shouldAlwaysUseTempStorageForModifyAll() && tablesToIgnore == null) { >+ if (whereClause != null) { >+ if (getSession().getPlatform().shouldAlwaysUseTempStorageForModifyAll() && tablesToIgnore == null) { > // currently DeleteAll using Oracle anonymous block is not implemented >- if(!getSession().getPlatform().isOracle()) { >+ if (!getSession().getPlatform().isOracle()) { > // if there are derived classes with additional tables - use temporary storage >- if(hasInheritance && getDescriptor().getInheritancePolicy().hasMultipleTableChild()) { >+ if (hasInheritance && descriptor.getInheritancePolicy().hasMultipleTableChild()) { > prepareDeleteAllUsingTempStorage(); > return; > } >@@ -1137,17 +1139,17 @@ > > // Don't use selectCallForExist in case there is no whereClause - > // a simpler sql will be created if possible. 
>- if(whereClause != null) { >+ if (whereClause != null) { > selectCallForExist = (SQLCall)selectStatementForExist.buildCall(getSession()); > } > >- if(isMainCase) { >+ if (isMainCase) { > // Main case: Descriptor is mapped to more than one table and/or the query references other tables > // > // Add and prepare to a call a delete statement for each table. >- // In the case of multiple tables, build the sql statements Vector in insert order. When the >+ // In the case of multiple tables, build the sql statements list in insert order. When the > // actual SQL calls are sent they are sent in the reverse of this order. >- for (DatabaseTable table : tablesInInsertOrder) { >+ for (DatabaseTable table : tablesInInsertOrder) { > Collection primaryKeyFields = getPrimaryKeyFieldsForTable(table); > SQLDeleteStatement deleteStatement; > >@@ -1156,7 +1158,7 @@ > // LargeProject will build "EXISTS" for LPROJECT and "NOT EXISTS" for Project. > // The situation is a bit more complex if more than two levels of inheritance is involved: > // both "EXISTS" and "NOT EXISTS" used for the "intermediate" (not first and not last) tables. >- if(!isSelectCallForNotExistRequired) { >+ if (!isSelectCallForNotExistRequired) { > // isSelectCallForNotExistRequired == false: > // either tablesToIgnore != null: it's a nested method call. > // Example: >@@ -1183,7 +1185,7 @@ > } > } else { > // there is inheritance >- if (table.equals(getDescriptor().getMultipleTableInsertOrder().get(0))) { >+ if (table.equals(descriptor.getMultipleTableInsertOrder().get(0))) { > // This is the highest table in inheritance hierarchy - the one that contains conditions > // (usually class indicator fields) that defines the class identity. > // inheritanceExpression is for this table (it doesn't reference any other tables). 
>@@ -1191,7 +1193,7 @@ > deleteStatement = buildDeleteAllStatement(table, inheritanceExpression, null, null, selectCallForNotExist, selectStatementForNotExist, primaryKeyFields); > } else { > ClassDescriptor desc = getHighestDescriptorMappingTable(table); >- if (desc == getDescriptor()) { >+ if (desc == descriptor) { > if (isLastTable) { > // In Employee example, query with reference class LargeProject calls this for LPROJECT table; > deleteStatement = buildDeleteAllStatement(table, null, selectCallForExist, selectStatementForExist, null, null, primaryKeyFields); >@@ -1243,11 +1245,15 @@ > } > } > >- if (getDescriptor().getTables().size() > 1) { >- getSQLStatements().addElement(deleteStatement); >+ if (descriptor.getTables().size() > 1) { >+ getSQLStatements().add(deleteStatement); > } else { > setSQLStatement(deleteStatement); > } >+ // Only delete from first table if delete is cascaded on the database. >+ if (descriptor.isCascadeOnDeleteSetOnDatabaseOnSecondaryTables()) { >+ break; >+ } > } > } else { > // A simple case: >@@ -1257,13 +1263,13 @@ > // In Employee example, query with reference class: > // Project will build a simple sql call for PROJECT(and will make nested method calls for LargeProject and SmallProject); > // SmallProject will build a simple sql call for PROJECT >- setSQLStatement(buildDeleteAllStatement(getDescriptor().getDefaultTable(), inheritanceExpression, selectCallForExist, selectStatementForExist, null, null, null)); >+ setSQLStatement(buildDeleteAllStatement(descriptor.getDefaultTable(), inheritanceExpression, selectCallForExist, selectStatementForExist, null, null, null)); > } > >- if(selectCallForExist == null) { >+ if (selectCallForExist == null) { > // Getting there means there is no whereClause. > // To handle the mappings selectCallForExist may be required in this case, too. 
>- if(hasInheritance && (tablesToIgnore != null || inheritanceExpression != null)) { >+ if (hasInheritance && (tablesToIgnore != null || inheritanceExpression != null)) { > // The only case NOT to create the call for no whereClause is either no inheritance, > // or it's an original (not a nested) method call and there is no inheritance expression. > // In Employee example: >@@ -1277,7 +1283,7 @@ > } > > // Add statements for ManyToMany and DirectCollection mappings >- Vector deleteStatementsForMappings = buildDeleteAllStatementsForMappings(selectCallForExist, selectStatementForExist, tablesToIgnore == null); >+ List<SQLStatement> deleteStatementsForMappings = buildDeleteAllStatementsForMappings(selectCallForExist, selectStatementForExist, tablesToIgnore == null); > if(!deleteStatementsForMappings.isEmpty()) { > if(getSQLStatement() != null) { > getSQLStatements().add(getSQLStatement()); >@@ -1288,37 +1294,37 @@ > } > > // Indicates whether the descriptor has children using extra tables. >- boolean hasChildrenWithExtraTables = hasInheritance && getDescriptor().getInheritancePolicy().hasChildren() && getDescriptor().getInheritancePolicy().hasMultipleTableChild(); >+ boolean hasChildrenWithExtraTables = hasInheritance && descriptor.getInheritancePolicy().hasChildren() && descriptor.getInheritancePolicy().hasMultipleTableChild(); > > // TBD: should we ignore subclasses in case descriptor doesn't want us to read them in? > //** Currently in this code we do ignore. 
> //** If it will be decided that we need to handle children in all cases > //** the following statement should be changed to: boolean shouldHandleChildren = hasChildrenWithExtraTables; >- boolean shouldHandleChildren = hasChildrenWithExtraTables && getDescriptor().getInheritancePolicy().shouldReadSubclasses(); >+ boolean shouldHandleChildren = hasChildrenWithExtraTables && descriptor.getInheritancePolicy().shouldReadSubclasses(); > > // Perform a nested method call for each child >- if(shouldHandleChildren) { >+ if (shouldHandleChildren) { > // In Employee example: query for Project will make nested calls to > // LargeProject and SmallProject and ask them to ignore PROJECT table >- Vector tablesToIgnoreForChildren = new Vector(); >+ List<DatabaseTable> tablesToIgnoreForChildren = new ArrayList(); > // The tables this descriptor has ignored, its children also should ignore. >- if(tablesToIgnore != null) { >+ if (tablesToIgnore != null) { > tablesToIgnoreForChildren.addAll(tablesToIgnore); > } > >- // If the desctiptor reads subclasses there is no need for >+ // If the descriptor reads subclasses there is no need for > // subclasses to process its tables for the second time. 
>- if (getDescriptor().getInheritancePolicy().shouldReadSubclasses()) { >+ if (descriptor.getInheritancePolicy().shouldReadSubclasses()) { > tablesToIgnoreForChildren.addAll(tablesInInsertOrder); > } > >- Iterator it = getDescriptor().getInheritancePolicy().getChildDescriptors().iterator(); >- while(it.hasNext()) { >+ Iterator it = descriptor.getInheritancePolicy().getChildDescriptors().iterator(); >+ while (it.hasNext()) { > // Define the same query for the child > ClassDescriptor childDescriptor = (ClassDescriptor)it.next(); > > // Need to process only "multiple tables" child descriptors >- if ((childDescriptor.getTables().size() > getDescriptor().getTables().size()) || >+ if (((!childDescriptor.isCascadeOnDeleteSetOnDatabaseOnSecondaryTables()) && childDescriptor.getTables().size() > descriptor.getTables().size()) || > (childDescriptor.getInheritancePolicy().hasMultipleTableChild())) > { > DeleteAllQuery childQuery = new DeleteAllQuery(); >@@ -1334,14 +1340,14 @@ > // Copy the statements from child query mechanism. > // In Employee example query for Project will pick up a statement for > // LPROJECT table from LargeProject and nothing from SmallProject. >- Vector childStatements = new Vector(); >- if(childMechanism.getCall() != null) { >+ List<SQLStatement> childStatements = new ArrayList(); >+ if (childMechanism.getCall() != null) { > childStatements.add(childMechanism.getSQLStatement()); > } else if(childMechanism.getSQLStatements() != null) { > childStatements.addAll(childMechanism.getSQLStatements()); > } >- if(!childStatements.isEmpty()) { >- if(getSQLStatement() != null) { >+ if (!childStatements.isEmpty()) { >+ if (getSQLStatement() != null) { > getSQLStatements().add(getSQLStatement()); > setSQLStatement(null); > } >@@ -1352,7 +1358,7 @@ > } > > // Nested method call doesn't need to call this. 
>- if(tablesToIgnore == null) { >+ if (tablesToIgnore == null) { > ((DeleteAllQuery)getQuery()).setIsPreparedUsingTempStorage(false); > super.prepareDeleteAll(); > } >@@ -1377,21 +1383,23 @@ > // in the tables NOT mapped to any class: ManyToManyMapping and DirectCollectionMapping > protected Vector buildDeleteAllStatementsForMappings(SQLCall selectCallForExist, SQLSelectStatement selectStatementForExist, boolean dontCheckDescriptor) { > Vector deleteStatements = new Vector(); >- Iterator itMappings = getDescriptor().getMappings().iterator(); >- while(itMappings.hasNext()) { >- DatabaseMapping mapping = (DatabaseMapping)itMappings.next(); >- if(mapping.isManyToManyMapping() || mapping.isDirectCollectionMapping()) { >- if(dontCheckDescriptor || mapping.getDescriptor().equals(getDescriptor())) { >+ ClassDescriptor descriptor = getDescriptor(); >+ for (DatabaseMapping mapping : descriptor.getMappings()) { >+ // TODO: Also need to delete 1-1 using join table, maybe element collection. >+ if (mapping.isManyToManyMapping() || mapping.isDirectCollectionMapping()) { >+ if ((dontCheckDescriptor >+ || mapping.getDescriptor().equals(descriptor)) >+ && !((ForeignReferenceMapping)mapping).isCascadeOnDeleteSetOnDatabase()) { > Vector sourceFields = null; > Vector targetFields = null; >- if(mapping.isManyToManyMapping()) { >+ if (mapping.isManyToManyMapping()) { > sourceFields = ((ManyToManyMapping)mapping).getSourceKeyFields(); > targetFields = ((ManyToManyMapping)mapping).getSourceRelationKeyFields(); >- } else if(mapping.isDirectCollectionMapping()) { >+ } else if (mapping.isDirectCollectionMapping()) { > sourceFields = ((DirectCollectionMapping)mapping).getSourceKeyFields(); > targetFields = ((DirectCollectionMapping)mapping).getReferenceKeyFields(); > } >- deleteStatements.addElement(buildDeleteAllStatementForMapping(selectCallForExist, selectStatementForExist, sourceFields, targetFields)); >+ deleteStatements.add(buildDeleteAllStatementForMapping(selectCallForExist, 
selectStatementForExist, sourceFields, targetFields)); > } > } > } >@@ -1469,19 +1477,24 @@ > * Pre-build the SQL statement from the expression. > */ > public void prepareDeleteObject() { >- if (getDescriptor().usesFieldLocking() && (getTranslationRow() == null)) { >+ ClassDescriptor descriptor = getDescriptor(); >+ if (descriptor.usesFieldLocking() && (getTranslationRow() == null)) { > return; > } > // Add and prepare to a call a delete statement for each table. > // In the case of multiple tables, build the sql statements Vector in insert order. When the > // actual SQL calls are sent they are sent in the reverse of this order. >- for (DatabaseTable table : getDescriptor().getMultipleTableInsertOrder()) { >+ for (DatabaseTable table : descriptor.getMultipleTableInsertOrder()) { > SQLDeleteStatement deleteStatement = buildDeleteStatement(table); >- if (getDescriptor().getTables().size() > 1) { >- getSQLStatements().addElement(deleteStatement); >+ if (descriptor.getTables().size() > 1) { >+ getSQLStatements().add(deleteStatement); > } else { > setSQLStatement(deleteStatement); > } >+ // Only delete from first table if delete is cascaded on the database. >+ if (descriptor.isCascadeOnDeleteSetOnDatabaseOnSecondaryTables()) { >+ break; >+ } > } > > super.prepareDeleteObject(); >@@ -2293,8 +2306,8 @@ > private Vector buildDeleteAllStatementsForTempTable(ClassDescriptor descriptor, DatabaseTable rootTable, Collection rootTablePrimaryKeyFields, Vector tablesToIgnore) { > Vector statements = org.eclipse.persistence.internal.helper.NonSynchronizedVector.newInstance(); > >- List tablesInInsertOrder; >- if(tablesToIgnore == null) { >+ List<DatabaseTable> tablesInInsertOrder; >+ if (tablesToIgnore == null) { > // It's original (not a nested) method call. > tablesInInsertOrder = descriptor.getMultipleTableInsertOrder(); > } else { >@@ -2303,10 +2316,8 @@ > // they have already been taken care of by the caller. 
> // In Employee example, query with reference class Project gets here > // to handle LPROJECT table; tablesToIgnore contains PROJECT table. >- tablesInInsertOrder = new Vector(descriptor.getMultipleTableInsertOrder().size()); >- for (Iterator tablesEnum = descriptor.getMultipleTableInsertOrder().iterator(); >- tablesEnum.hasNext();) { >- DatabaseTable table = (DatabaseTable)tablesEnum.next(); >+ tablesInInsertOrder = new ArrayList(descriptor.getMultipleTableInsertOrder().size()); >+ for (DatabaseTable table : descriptor.getMultipleTableInsertOrder()) { > if (!tablesToIgnore.contains(table)) { > tablesInInsertOrder.add(table); > } >@@ -2314,12 +2325,14 @@ > } > > if (!tablesInInsertOrder.isEmpty()) { >- Iterator itTables = tablesInInsertOrder.iterator(); >- while (itTables.hasNext()) { >- DatabaseTable table = (DatabaseTable)itTables.next(); >+ for (DatabaseTable table : tablesInInsertOrder) { > SQLDeleteAllStatementForTempTable deleteStatement > = buildDeleteAllStatementForTempTable(rootTable, rootTablePrimaryKeyFields, table, getPrimaryKeyFieldsForTable(descriptor, table)); >- statements.addElement(deleteStatement); >+ statements.add(deleteStatement); >+ // Only delete from first table if delete is cascaded on the database. >+ if (descriptor.isCascadeOnDeleteSetOnDatabaseOnSecondaryTables()) { >+ break; >+ } > } > > // Add statements for ManyToMany and DirectCollection mappings >@@ -2338,23 +2351,23 @@ > boolean shouldHandleChildren = hasChildrenWithExtraTables && descriptor.getInheritancePolicy().shouldReadSubclasses(); > > // Perform a nested method call for each child >- if(shouldHandleChildren) { >+ if (shouldHandleChildren) { > // In Employee example: query for Project will make nested calls to > // LargeProject and SmallProject and ask them to ignore PROJECT table > Vector tablesToIgnoreForChildren = new Vector(); > // The tables this descriptor has ignored, its children also should ignore. 
>- if(tablesToIgnore != null) { >+ if (tablesToIgnore != null) { > tablesToIgnoreForChildren.addAll(tablesToIgnore); > } > >- // If the desctiptor reads subclasses there is no need for >+ // If the descriptor reads subclasses there is no need for > // subclasses to process its tables for the second time. > if (descriptor.getInheritancePolicy().shouldReadSubclasses()) { > tablesToIgnoreForChildren.addAll(tablesInInsertOrder); > } > > Iterator it = descriptor.getInheritancePolicy().getChildDescriptors().iterator(); >- while(it.hasNext()) { >+ while (it.hasNext()) { > ClassDescriptor childDescriptor = (ClassDescriptor)it.next(); > > // Need to process only "multiple tables" child descriptors >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/sessions/AbstractSession.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/sessions/AbstractSession.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/sessions/AbstractSession.java (working copy) >@@ -3985,24 +3985,23 @@ > * @param allowSameQueryNameDiffArgsCopyToSession if the value is true, it allow > * multiple queries of the same name but different arguments to be copied to the session. 
> */ >- public void copyDescriptorNamedQueries(boolean allowSameQueryNameDiffArgsCopyToSession){ >- Vector descriptors = getProject().getOrderedDescriptors(); >- for( Iterator descItr = descriptors.iterator(); descItr.hasNext(); ){ >- Map queries = ((ClassDescriptor)descItr.next()).getQueryManager().getQueries(); >- if(queries!=null && queries.size()>0){ >- for(Iterator keyValueItr = queries.entrySet().iterator();keyValueItr.hasNext();){ >+ public void copyDescriptorNamedQueries(boolean allowSameQueryNameDiffArgsCopyToSession) { >+ for (ClassDescriptor descriptor : getProject().getOrderedDescriptors()) { >+ Map queries = descriptor.getQueryManager().getQueries(); >+ if ((queries != null) && (queries.size() > 0)) { >+ for (Iterator keyValueItr = queries.entrySet().iterator(); keyValueItr.hasNext();){ > Map.Entry entry = (Map.Entry) keyValueItr.next(); > Vector thisQueries = (Vector)entry.getValue(); >- if(thisQueries!=null && thisQueries.size()>0){ >- for(Iterator thisQueriesItr=thisQueries.iterator();thisQueriesItr.hasNext();){ >+ if ((thisQueries != null) && (thisQueries.size() > 0)){ >+ for( Iterator thisQueriesItr=thisQueries.iterator();thisQueriesItr.hasNext();){ > DatabaseQuery queryToBeAdded = (DatabaseQuery)thisQueriesItr.next(); >- if(allowSameQueryNameDiffArgsCopyToSession){ >+ if (allowSameQueryNameDiffArgsCopyToSession){ > addQuery(queryToBeAdded); > } else { >- if(getQuery(queryToBeAdded.getName())==null){ >+ if (getQuery(queryToBeAdded.getName()) == null){ > addQuery(queryToBeAdded); >- }else{ >- this.log(SessionLog.WARNING, SessionLog.PROPERTIES, "descriptor_named_query_cannot_be_added", new Object[]{queryToBeAdded,queryToBeAdded.getName(),queryToBeAdded.getArgumentTypes()}); >+ } else { >+ log(SessionLog.WARNING, SessionLog.PROPERTIES, "descriptor_named_query_cannot_be_added", new Object[]{queryToBeAdded,queryToBeAdded.getName(),queryToBeAdded.getArgumentTypes()}); > } > } > } >Index: 
foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/sessions/remote/RemoteUnitOfWork.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/sessions/remote/RemoteUnitOfWork.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/sessions/remote/RemoteUnitOfWork.java (working copy) >@@ -99,7 +99,7 @@ > */ > protected Vector collectUnregisteredNewObjects() { > discoverAllUnregisteredNewObjects(); >- return Helper.buildVectorFromHashtableElements(getUnregisteredNewObjects()); >+ return Helper.buildVectorFromMapElements(getUnregisteredNewObjects()); > } > > /** >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/sessions/RepeatableWriteUnitOfWork.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/sessions/RepeatableWriteUnitOfWork.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/sessions/RepeatableWriteUnitOfWork.java (working copy) >@@ -328,7 +328,6 @@ > * For synchronized units of work, dump SQL to database > */ > public void issueSQLbeforeCompletion() { >- > super.issueSQLbeforeCompletion(false); > > if (this.cumulativeUOWChangeSet != null){ >@@ -397,12 +396,12 @@ > */ > public void writeChanges() { > // Check for a nested flush and return early if we are in one >- if(this.isWithinFlush()) { >+ if (this.isWithinFlush()) { > AbstractSessionLog.getLog().log(SessionLog.WARNING, > "nested_entity_manager_flush_not_executed_pre_query_changes_may_be_pending", this.getClass().getSimpleName()); > return; >- } >- >+ } >+ log(SessionLog.FINER, SessionLog.TRANSACTION, "begin_unit_of_work_flush"); > if (this.unitOfWorkChangeSet == null) { > this.unitOfWorkChangeSet = new UnitOfWorkChangeSet(this); > } >@@ -417,6 +416,7 
@@ > // Write those changes to the database. > if (!changeSet.hasChanges() && !changeSet.hasForcedChanges() && ! this.hasDeletedObjects() && ! this.hasModifyAllQueries()) { > this.isWithinFlush = false; // clear the flag in the case that we don't have changes >+ log(SessionLog.FINER, SessionLog.TRANSACTION, "end_unit_of_work_flush"); > return; > } > try { >@@ -436,8 +436,10 @@ > // Merge those changes back into the backup clones and the final uowChangeSet. > this.cumulativeUOWChangeSet.mergeUnitOfWorkChangeSet(changeSet, this, true); > } >+ log(SessionLog.FINER, SessionLog.TRANSACTION, "end_unit_of_work_flush"); > > resumeUnitOfWork(); >+ log(SessionLog.FINER, SessionLog.TRANSACTION, "resume_unit_of_work"); > } > > /** >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/sessions/UnitOfWorkImpl.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/sessions/UnitOfWorkImpl.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/internal/sessions/UnitOfWorkImpl.java (working copy) >@@ -291,6 +291,9 @@ > > /** records that the UOW is executing deferred events. Events could cause operations to occur that may attempt to restart the event execution. This must be avoided*/ > protected boolean isExecutingEvents = false; >+ >+ /** Set of objects that were deleted by database cascade delete constraints. 
*/ >+ protected Set<Object> cascadeDeleteObjects; > > /** > * INTERNAL: >@@ -1077,11 +1080,11 @@ > return; > } > } >+ log(SessionLog.FINER, SessionLog.TRANSACTION, "begin_unit_of_work_commit"); > if (this.lifecycle == CommitTransactionPending) { > commitAfterWriteChanges(); > return; > } >- log(SessionLog.FINER, SessionLog.TRANSACTION, "begin_unit_of_work_commit");// bjv - correct spelling > if (this.eventManager != null) { > this.eventManager.preCommitUnitOfWork(); > } >@@ -2187,6 +2190,33 @@ > public boolean hasContainerBeans() { > return ((containerBeans != null) && !containerBeans.isEmpty()); > } >+ >+ /** >+ * INTERNAL: >+ * Return any objects have been deleted through database cascade delete constraints. >+ */ >+ public Set<Object> getCascadeDeleteObjects() { >+ if (this.cascadeDeleteObjects == null) { >+ this.cascadeDeleteObjects = new IdentityHashSet(); >+ } >+ return this.cascadeDeleteObjects; >+ } >+ >+ /** >+ * INTERNAL: >+ * Set any objects have been deleted through database cascade delete constraints. >+ */ >+ protected void setCascadeDeleteObjects(Set<Object> cascadeDeleteObjects) { >+ this.cascadeDeleteObjects = cascadeDeleteObjects; >+ } >+ >+ /** >+ * INTERNAL: >+ * Return if any objects have been deleted through database cascade delete constraints. >+ */ >+ public boolean hasCascadeDeleteObjects() { >+ return ((this.cascadeDeleteObjects != null) && !this.cascadeDeleteObjects.isEmpty()); >+ } > > /** > * INTERNAL: >@@ -3177,9 +3207,9 @@ > commitTransactionAfterWriteChanges(); > return; > } >+ log(SessionLog.FINER, SessionLog.TRANSACTION, "begin_unit_of_work_commit"); > mergeBmpAndWsEntities(); > // CR#... call event and log. 
>- log(SessionLog.FINER, SessionLog.TRANSACTION, "begin_unit_of_work_commit"); > if (this.eventManager != null) { > this.eventManager.preCommitUnitOfWork(); > } >@@ -5511,8 +5541,8 @@ > if (this.isNestedUnitOfWork) { > throw ValidationException.writeChangesOnNestedUnitOfWork(); > } >+ log(SessionLog.FINER, SessionLog.TRANSACTION, "begin_unit_of_work_flush"); > mergeBmpAndWsEntities(); >- log(SessionLog.FINER, SessionLog.TRANSACTION, "begin_unit_of_work_commit"); > if (this.eventManager != null) { > this.eventManager.preCommitUnitOfWork(); > } >@@ -5526,6 +5556,7 @@ > throw exception; > } > setLifecycle(CommitTransactionPending); >+ log(SessionLog.FINER, SessionLog.TRANSACTION, "end_unit_of_work_flush"); > } > > /** >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/AggregateCollectionMapping.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/AggregateCollectionMapping.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/AggregateCollectionMapping.java (working copy) >@@ -369,11 +369,16 @@ > */ > public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects){ > //aggregate objects are not registered but their mappings should be. >- Object cloneAttribute = null; >- cloneAttribute = getAttributeValueFromObject(object); >+ Object cloneAttribute = getAttributeValueFromObject(object); > if ((cloneAttribute == null)) { > return; > } >+ // PERF: If not instantiated, then avoid instantiating, delete-all will handle deletion. 
>+ if (usesIndirection() && (!mustDeleteReferenceObjectsOneByOne())) { >+ if (!this.indirectionPolicy.objectIsInstantiated(cloneAttribute)) { >+ return; >+ } >+ } > > ObjectBuilder builder = null; > ContainerPolicy cp = getContainerPolicy(); >@@ -385,6 +390,9 @@ > Object nextObject = cp.unwrapIteratorResult(wrappedObject); > if (nextObject != null && ( ! visitedObjects.containsKey(nextObject) ) ){ > visitedObjects.put(nextObject, nextObject); >+ if (this.isCascadeOnDeleteSetOnDatabase) { >+ uow.getCascadeDeleteObjects().add(nextObject); >+ } > builder = getReferenceDescriptor(nextObject.getClass(), uow).getObjectBuilder(); > builder.cascadePerformRemove(nextObject, uow, visitedObjects); > cp.cascadePerformRemoveIfRequired(wrappedObject, uow, visitedObjects); >@@ -561,7 +569,12 @@ > } > > if (shouldRepairOrder) { >- ((DeleteAllQuery)getDeleteAllQuery()).executeDeleteAll(query.getSession().getSessionForClass(getReferenceClass()), query.getTranslationRow(), new Vector(previousList)); >+ DeleteAllQuery deleteAllQuery = (DeleteAllQuery)this.deleteAllQuery; >+ if (this.isCascadeOnDeleteSetOnDatabase) { >+ deleteAllQuery = (DeleteAllQuery)deleteAllQuery.clone(); >+ deleteAllQuery.setIsInMemoryOnly(false); >+ } >+ deleteAllQuery.executeDeleteAll(query.getSession().getSessionForClass(getReferenceClass()), query.getTranslationRow(), new Vector(previousList)); > } else { > // Next index the previous objects (read from db or from backup in uow) > for(int i=0; i < previousList.size(); i++) { >@@ -969,9 +982,21 @@ > * To delete all the entries matching the selection criteria from the table stored in the > * referenced descriptor > */ >- protected void deleteAll(DeleteObjectQuery query, Object elements) throws DatabaseException { >- // Ensure that the query is prepare before cloning. 
>- ((DeleteAllQuery)getDeleteAllQuery()).executeDeleteAll(query.getSession().getSessionForClass(getReferenceClass()), query.getTranslationRow(), getContainerPolicy().vectorFor(elements, query.getSession())); >+ protected void deleteAll(DeleteObjectQuery query, AbstractSession session) throws DatabaseException { >+ Object attribute = getAttributeValueFromObject(query.getObject()); >+ if (usesIndirection()) { >+ if (!this.indirectionPolicy.objectIsInstantiated(attribute)) { >+ // An empty Vector indicates to DeleteAllQuery that no objects should be removed from cache >+ ((DeleteAllQuery)this.deleteAllQuery).executeDeleteAll(session.getSessionForClass(this.referenceClass), query.getTranslationRow(), new Vector(0)); >+ return; >+ } >+ } >+ Object referenceObjects = getRealCollectionAttributeValueFromObject(query.getObject(), session); >+ // PERF: Avoid delete if empty. >+ if (session.isUnitOfWork() && this.containerPolicy.isEmpty(referenceObjects)) { >+ return; >+ } >+ ((DeleteAllQuery)this.deleteAllQuery).executeDeleteAll(session.getSessionForClass(this.referenceClass), query.getTranslationRow(), this.containerPolicy.vectorFor(referenceObjects, session)); > } > > /** >@@ -1606,6 +1631,7 @@ > query.setReferenceClass(getReferenceClass()); > query.setDescriptor(getReferenceDescriptor()); > query.setShouldMaintainCache(false); >+ query.setIsInMemoryOnly(isCascadeOnDeleteSetOnDatabase()); > if (!hasCustomDeleteAllQuery()) { > if (getSelectionCriteria() == null) { > query.setSelectionCriteria(getDeleteAllCriteria(session)); >@@ -2041,36 +2067,42 @@ > if (isReadOnly()) { > return; > } >+ AbstractSession session = query.getSession(); > >- Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession()); >- >- ContainerPolicy containerPolicy = getContainerPolicy(); >- >- // if privately owned parts have their privately own parts, delete those one by one >+ // If privately owned parts have their privately own parts, delete those one by one > // 
else delete everything in one shot. > int index = 0; >- if (containerPolicy.propagatesEventsToCollection() || mustDeleteReferenceObjectsOneByOne()) { >- for (Object iter = containerPolicy.iteratorFor(objects); containerPolicy.hasNext(iter);) { >- Object wrappedObject = containerPolicy.nextEntry(iter, query.getSession()); >+ if (mustDeleteReferenceObjectsOneByOne()) { >+ Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession()); >+ ContainerPolicy cp = getContainerPolicy(); >+ if (this.isCascadeOnDeleteSetOnDatabase && session.isUnitOfWork()) { >+ for (Object iterator = cp.iteratorFor(objects); cp.hasNext(iterator);) { >+ Object wrappedObject = cp.nextEntry(iterator, session); >+ Object object = cp.unwrapIteratorResult(wrappedObject); >+ ((UnitOfWorkImpl)session).getCascadeDeleteObjects().add(object); >+ } >+ } >+ for (Object iter = cp.iteratorFor(objects); cp.hasNext(iter);) { >+ Object wrappedObject = cp.nextEntry(iter, session); > DeleteObjectQuery deleteQuery = new DeleteObjectQuery(); > deleteQuery.setIsExecutionClone(true); > Map extraData = null; >- if(this.listOrderField != null) { >+ if (this.listOrderField != null) { > extraData = new DatabaseRecord(1); > extraData.put(this.listOrderField, index++); > } > prepareModifyQueryForDelete(query, deleteQuery, wrappedObject, extraData); >- query.getSession().executeQuery(deleteQuery, deleteQuery.getTranslationRow()); >- containerPolicy.propogatePreDelete(query, wrappedObject); >+ session.executeQuery(deleteQuery, deleteQuery.getTranslationRow()); >+ cp.propogatePreDelete(query, wrappedObject); > } >- if (!query.getSession().isUnitOfWork()) { >+ if (!session.isUnitOfWork()) { > // This deletes any objects on the database, as the collection in memory may has been changed. > // This is not required for unit of work, as the update would have already deleted these objects, > // and the backup copy will include the same objects causing double deletes. 
> verifyDeleteForUpdate(query); > } > } else { >- deleteAll(query, objects); >+ deleteAll(query, session); > } > } > >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/CollectionMapping.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/CollectionMapping.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/CollectionMapping.java (working copy) >@@ -68,6 +68,9 @@ > **/ > protected OrderCorrectionType orderCorrectionType; > >+ /** Store if the mapping can batch delete reference objects. */ >+ protected boolean mustDeleteReferenceObjectsOneByOne; >+ > /** > * PUBLIC: > * Default constructor. >@@ -268,13 +271,19 @@ > */ > @Override > public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) { >- if (!isCascadeRemove()) { >+ if (!this.cascadeRemove) { > return; > } > Object cloneAttribute = getAttributeValueFromObject(object); > if (cloneAttribute == null) { > return; > } >+ // PERF: If private owned and not instantiated, then avoid instantiating, delete-all will handle deletion. 
>+ if (this.isPrivateOwned && usesIndirection() && (!mustDeleteReferenceObjectsOneByOne())) { >+ if (!this.indirectionPolicy.objectIsEasilyInstantiated(cloneAttribute)) { >+ return; >+ } >+ } > > ContainerPolicy cp = this.containerPolicy; > Object cloneObjectCollection = null; >@@ -285,6 +294,9 @@ > Object nextObject = cp.unwrapIteratorResult(wrappedObject); > if ((nextObject != null) && (!visitedObjects.containsKey(nextObject))) { > visitedObjects.put(nextObject, nextObject); >+ if (this.isCascadeOnDeleteSetOnDatabase && isOneToManyMapping()) { >+ uow.getCascadeDeleteObjects().add(nextObject); >+ } > uow.performRemove(nextObject, visitedObjects); > cp.cascadePerformRemoveIfRequired(wrappedObject, uow, visitedObjects); > } >@@ -1225,8 +1237,7 @@ > * one by one, as opposed to with a single DELETE statement. > */ > protected boolean mustDeleteReferenceObjectsOneByOne() { >- ClassDescriptor referenceDescriptor = this.getReferenceDescriptor(); >- return referenceDescriptor.hasDependencyOnParts() || referenceDescriptor.usesOptimisticLocking() || (referenceDescriptor.hasInheritance() && referenceDescriptor.getInheritancePolicy().shouldReadSubclasses()) || referenceDescriptor.hasMultipleTables() || containerPolicy.propagatesEventsToCollection(); >+ return this.mustDeleteReferenceObjectsOneByOne; > } > > /** >@@ -1616,6 +1627,14 @@ > @Override > public void postInitialize(AbstractSession session) { > this.containerPolicy.postInitialize(session); >+ if (this.referenceDescriptor != null) { >+ this.mustDeleteReferenceObjectsOneByOne = this.referenceDescriptor.hasDependencyOnParts() >+ || this.referenceDescriptor.usesOptimisticLocking() >+ || (this.referenceDescriptor.hasInheritance() && this.referenceDescriptor.getInheritancePolicy().shouldReadSubclasses()) >+ || this.referenceDescriptor.hasMultipleTables() || this.containerPolicy.propagatesEventsToCollection(); >+ } else { >+ this.mustDeleteReferenceObjectsOneByOne = false; >+ } > } > > /** >@@ -2186,7 +2205,7 @@ > return 
true; > } > >- if (isPrivateOwned()) { >+ if (isPrivateOwned() || isCascadeRemove()) { > Object objects = getRealCollectionAttributeValueFromObject(object, session); > > ContainerPolicy containerPolicy = this.containerPolicy; >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/DatabaseMapping.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/DatabaseMapping.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/DatabaseMapping.java (working copy) >@@ -735,7 +735,7 @@ > * Return if the mapping has any ownership or other dependency over its target object(s). > */ > public boolean hasDependency() { >- return isPrivateOwned(); >+ return false; > } > > /** >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/DirectCollectionMapping.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/DirectCollectionMapping.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/DirectCollectionMapping.java (working copy) >@@ -2421,15 +2421,20 @@ > */ > @Override > public void preDelete(DeleteObjectQuery query) throws DatabaseException { >- if (isReadOnly()) { >+ if (this.isReadOnly) { > return; > } > >- prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getSession()); >- query.getSession().executeQuery(getDeleteAllQuery(), query.getTranslationRow()); >- if ((getHistoryPolicy() != null) && getHistoryPolicy().shouldHandleWrites()) { >- getHistoryPolicy().mappingLogicalDelete(getDeleteAllQuery(), query.getTranslationRow(), query.getSession()); >+ if (!this.isCascadeOnDeleteSetOnDatabase) { >+ prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getSession()); >+ 
query.getSession().executeQuery(this.deleteAllQuery, query.getTranslationRow()); > } >+ if ((this.historyPolicy != null) && this.historyPolicy.shouldHandleWrites()) { >+ if (this.isCascadeOnDeleteSetOnDatabase) { >+ prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getSession()); >+ } >+ this.historyPolicy.mappingLogicalDelete(this.deleteAllQuery, query.getTranslationRow(), query.getSession()); >+ } > } > > /** >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/ForeignReferenceMapping.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/ForeignReferenceMapping.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/ForeignReferenceMapping.java (working copy) >@@ -118,6 +118,9 @@ > DEDICATED_QUERY > } > ExtendPessimisticLockScope extendPessimisticLockScope; >+ >+ /** Support delete cascading on the database relationship constraint. */ >+ protected boolean isCascadeOnDeleteSetOnDatabase; > > protected ForeignReferenceMapping() { > this.isPrivateOwned = false; >@@ -163,6 +166,7 @@ > * INTERNAL: > * Clone the attribute from the clone and assign it to the backup. > */ >+ @Override > public void buildBackupClone(Object clone, Object backup, UnitOfWorkImpl unitOfWork) { > Object attributeValue = getAttributeValueFromObject(clone); > Object clonedAttributeValue = this.indirectionPolicy.backupCloneAttribute(attributeValue, clone, backup, unitOfWork); >@@ -174,12 +178,14 @@ > * Used during building the backup shallow copy to copy the > * target object without re-registering it. > */ >+ @Override > public abstract Object buildBackupCloneForPartObject(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork); > > /** > * INTERNAL: > * Clone the attribute from the original and assign it to the clone. 
> */ >+ @Override > public void buildClone(Object original, Object clone, UnitOfWorkImpl unitOfWork) { > Object attributeValue = getAttributeValueFromObject(original); > Object clonedAttributeValue = this.indirectionPolicy.cloneAttribute(attributeValue, original, clone, unitOfWork, false); // building clone from an original not a row. >@@ -204,6 +210,7 @@ > * In order to bypass the shared cache when in transaction a UnitOfWork must > * be able to populate working copies directly from the row. > */ >+ @Override > public void buildCloneFromRow(AbstractRecord databaseRow, JoinedAttributeManager joinManager, Object clone, ObjectBuildingQuery sourceQuery, UnitOfWorkImpl unitOfWork, AbstractSession executionSession) { > Object attributeValue = valueFromRow(databaseRow, joinManager, sourceQuery, executionSession); > Object clonedAttributeValue = this.indirectionPolicy.cloneAttribute(attributeValue, null,// no original >@@ -225,6 +232,7 @@ > * INTERNAL: > * The mapping clones itself to create deep copy. > */ >+ @Override > public Object clone() { > ForeignReferenceMapping clone = (ForeignReferenceMapping)super.clone(); > >@@ -246,6 +254,7 @@ > * INTERNAL: Compare the attributes belonging to this mapping for the > * objects. > */ >+ @Override > public boolean compareObjects(Object firstObject, Object secondObject, AbstractSession session) { > if (isPrivateOwned()) { > return compareObjectsWithPrivateOwned(firstObject, secondObject, session); >@@ -269,8 +278,8 @@ > * Convert all the class-name-based settings in this mapping to actual class-based > * settings. This method is used when converting a project that has been built > * with class names to a project with classes. >- * @param classLoader > */ >+ @Override > public void convertClassNamesToClasses(ClassLoader classLoader){ > super.convertClassNamesToClasses(classLoader); > >@@ -784,6 +793,7 @@ > * Replace the transient attributes of the remote value holders > * with client-side objects. 
> */ >+ @Override > public void fixObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, RemoteSession session) { > this.indirectionPolicy.fixObjectReferences(object, objectDescriptors, processedObjects, query, session); > } >@@ -792,6 +802,7 @@ > * INTERNAL: > * Return the value of an attribute which this mapping represents for an object. > */ >+ @Override > public Object getAttributeValueFromObject(Object object) throws DescriptorException { > Object attributeValue = super.getAttributeValueFromObject(object); > Object indirectionValue = this.indirectionPolicy.validateAttributeOfInstantiatedObject(attributeValue); >@@ -854,6 +865,7 @@ > * ForeignReferenceMappings have to worry about > * maintaining object identity. > */ >+ @Override > public Object getObjectCorrespondingTo(Object object, RemoteSession session, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query) { > return session.getObjectCorrespondingTo(object, objectDescriptors, processedObjects, query); > } >@@ -864,6 +876,7 @@ > * If the attribute is using indirection the value of the value-holder is returned. > * If the value holder is not instantiated then it is instantiated. > */ >+ @Override > public Object getRealAttributeValueFromAttribute(Object attributeValue, Object object, AbstractSession session) { > return this.indirectionPolicy.getRealAttributeValueFromObject(object, attributeValue); > } >@@ -874,6 +887,7 @@ > * however for eager relationships this can be used with indirection to allow > * indirection locking and change tracking, but still always force instantiation. > */ >+ @Override > public boolean isLazy() { > if (isLazy == null) { > // False by default for mappings without indirection. >@@ -884,9 +898,9 @@ > > /** > * INTERNAL: >- * Return whether this mapping should be traversed when we are locking >- * @return >+ * Return whether this mapping should be traversed when we are locking. 
> */ >+ @Override > public boolean isLockableMapping(){ > return !(this.usesIndirection()); > } >@@ -895,6 +909,7 @@ > * INTERNAL: > * Trigger the instantiation of the attribute if lazy. > */ >+ @Override > public void instantiateAttribute(Object object, AbstractSession session) { > this.indirectionPolicy.instantiateObject(object, getAttributeValueFromObject(object)); > } >@@ -986,6 +1001,7 @@ > * Extract and return the appropriate value from the > * specified remote value holder. > */ >+ @Override > public Object getValueFromRemoteValueHolder(RemoteValueHolder remoteValueHolder) { > return this.indirectionPolicy.getValueFromRemoteValueHolder(remoteValueHolder); > } >@@ -1003,6 +1019,7 @@ > * INTERNAL: > * Initialize the state of mapping. > */ >+ @Override > public void preInitialize(AbstractSession session) throws DescriptorException { > super.preInitialize(session); > // If weaving was used the mapping must be configured to use the weaved get/set methods. >@@ -1027,6 +1044,7 @@ > * INTERNAL: > * Initialize the state of mapping. > */ >+ @Override > public void initialize(AbstractSession session) throws DescriptorException { > super.initialize(session); > if (isPrivateOwned){ >@@ -1131,7 +1149,17 @@ > > /** > * INTERNAL: >+ * Return if the mapping has any ownership or other dependency over its target object(s). > */ >+ @Override >+ public boolean hasDependency() { >+ return isPrivateOwned() || isCascadeRemove(); >+ } >+ >+ /** >+ * INTERNAL: >+ */ >+ @Override > public boolean isForeignReferenceMapping() { > return true; > } >@@ -1148,6 +1176,7 @@ > * PUBLIC: > * Return true if referenced objects are privately owned else false. > */ >+ @Override > public boolean isPrivateOwned() { > return isPrivateOwned; > } >@@ -1158,6 +1187,7 @@ > * The iterator's settings for cascading and value holders determine how the > * iteration continues from here. 
> */ >+ @Override > public void iterate(DescriptorIterator iterator) { > Object attributeValue = this.getAttributeValueFromObject(iterator.getVisitedParent()); > this.indirectionPolicy.iterateOnAttributeValue(iterator, attributeValue); >@@ -1168,6 +1198,7 @@ > * Iterate on the attribute value. > * The value holder has already been processed. > */ >+ @Override > public abstract void iterateOnRealAttributeValue(DescriptorIterator iterator, Object realAttributeValue); > > /** >@@ -1195,6 +1226,7 @@ > * mappings are initialized and serialized reference descriptors are replaced with local descriptors if they already exist on the > * remote session. > */ >+ @Override > public void remoteInitialization(DistributedSession session) { > super.remoteInitialization(session); > setTempSession(session); >@@ -1204,6 +1236,7 @@ > * INTERNAL: > * replace the value holders in the specified reference object(s) > */ >+ @Override > public Map replaceValueHoldersIn(Object object, RemoteSessionController controller) { > return controller.replaceValueHoldersIn(object); > } >@@ -1336,6 +1369,7 @@ > * placing it inside a value holder if necessary. > * If the value holder is not instantiated then it is instantiated. > */ >+ @Override > public void setRealAttributeValueInObject(Object object, Object value) throws DescriptorException { > this.indirectionPolicy.setRealAttributeValueInObject(object, value); > } >@@ -1699,9 +1733,33 @@ > } > > /** >+ * ADVANCED: >+ * Return if delete cascading has been set on the database for the >+ * mapping's foreign key constraint. >+ */ >+ public boolean isCascadeOnDeleteSetOnDatabase() { >+ return isCascadeOnDeleteSetOnDatabase; >+ } >+ >+ /** >+ * ADVANCED: >+ * Set if delete cascading has been set on the database for the >+ * mapping's foreign key constraint. >+ * The behavior is dependent on the mapping. 
>+ * <p>OneToOne (target foreign key) - deletes target object (private owned) >+ * <p>OneToMany, AggregateCollection - deletes target objects (private owned) >+ * <p>ManyToMany - deletes from join table (only) >+ * <p>DirectCollection - delete from direct table >+ */ >+ public void setIsCascadeOnDeleteSetOnDatabase(boolean isCascadeOnDeleteSetOnDatabase) { >+ this.isCascadeOnDeleteSetOnDatabase = isCascadeOnDeleteSetOnDatabase; >+ } >+ >+ /** > * INTERNAL: > * To validate mappings declaration > */ >+ @Override > public void validateBeforeInitialization(AbstractSession session) throws DescriptorException { > super.validateBeforeInitialization(session); > >@@ -1728,6 +1786,7 @@ > * Check whether the mapping's attribute should be optimized through batch > * and joining. > */ >+ @Override > public Object valueFromRow(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, AbstractSession executionSession) throws DatabaseException { > // PERF: Direct variable access. > // If the query uses batch reading, return a special value holder >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/ManyToManyMapping.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/ManyToManyMapping.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/ManyToManyMapping.java (working copy) >@@ -67,6 +67,7 @@ > /** > * INTERNAL: > */ >+ @Override > public boolean isOwned(){ > return !isReadOnly; > } >@@ -74,6 +75,7 @@ > /** > * INTERNAL: > */ >+ @Override > public boolean isRelationalMapping() { > return true; > } >@@ -118,6 +120,7 @@ > * INTERNAL: > * The mapping clones itself to create deep copy. 
> */ >+ @Override > public Object clone() { > ManyToManyMapping clone = (ManyToManyMapping)super.clone(); > clone.mechanism = (RelationTableMechanism)this.mechanism.clone(); >@@ -130,15 +133,21 @@ > * This method is called to update collection tables prior to commit. > */ > @Override >- public void earlyPreDelete(DeleteObjectQuery query){ >+ public void earlyPreDelete(DeleteObjectQuery query) { > AbstractSession querySession = query.getSession(); >- prepareTranslationRow(query.getTranslationRow(), query.getObject(), querySession); >- querySession.executeQuery(getDeleteAllQuery(), query.getTranslationRow()); >+ if (!this.isCascadeOnDeleteSetOnDatabase) { >+ prepareTranslationRow(query.getTranslationRow(), query.getObject(), querySession); >+ querySession.executeQuery(this.deleteAllQuery, query.getTranslationRow()); >+ } > >- if ((getHistoryPolicy() != null) && getHistoryPolicy().shouldHandleWrites()) { >- getHistoryPolicy().mappingLogicalDelete(getDeleteAllQuery(), query.getTranslationRow(), querySession); >+ if ((this.historyPolicy != null) && this.historyPolicy.shouldHandleWrites()) { >+ if (this.isCascadeOnDeleteSetOnDatabase) { >+ prepareTranslationRow(query.getTranslationRow(), query.getObject(), querySession); >+ } >+ this.historyPolicy.mappingLogicalDelete(this.deleteAllQuery, query.getTranslationRow(), querySession); > } > } >+ > /** > * INTERNAL > * Called when a DatabaseMapping is used to map the key in a collection. Returns the key. >@@ -151,6 +160,7 @@ > * INTERNAL: > * Adds locking clause to the target query to extend pessimistic lock scope. > */ >+ @Override > protected void extendPessimisticLockScopeInTargetQuery(ObjectLevelReadQuery targetQuery, ObjectBuildingQuery sourceQuery) { > this.mechanism.setRelationTableLockingClause(targetQuery, sourceQuery); > } >@@ -165,6 +175,7 @@ > * This method must be implemented in subclasses that allow > * setting shouldExtendPessimisticLockScopeInSourceQuery to true. 
> */ >+ @Override > public void extendPessimisticLockScopeInSourceQuery(ObjectLevelReadQuery sourceQuery) { > Expression exp = sourceQuery.getSelectionCriteria(); > exp = this.mechanism.joinRelationTableField(exp, sourceQuery.getExpressionBuilder()); >@@ -242,6 +253,7 @@ > * Should be overridden by subclass that allows setting > * extendPessimisticLockScope to DEDICATED_QUERY. > */ >+ @Override > protected ReadQuery getExtendPessimisticLockScopeDedicatedQuery(AbstractSession session, short lockMode) { > if(this.mechanism != null) { > return this.mechanism.getLockRelationTableQueryClone(session, lockMode); >@@ -278,6 +290,7 @@ > > /** > * PUBLIC: >+ * Allows history tracking on the m-m join table. > */ > public HistoryPolicy getHistoryPolicy() { > return historyPolicy; >@@ -324,23 +337,6 @@ > } > > /** >- * INTERNAL: >- * Returns the selection criteria stored in the mapping selection query. This criteria >- * is used to read reference objects from the database. >- */ >- public Expression getSelectionCriteria() { >- return getSelectionQuery().getSelectionCriteria(); >- } >- >- /** >- * INTERNAL: >- * Returns the read query assoicated with the mapping. >- */ >- public ReadQuery getSelectionQuery() { >- return selectionQuery; >- } >- >- /** > * PUBLIC: > * Return the source key field names associated with the mapping. > * These are in-order with the sourceRelationKeyFieldNames. >@@ -420,14 +416,16 @@ > * INTERNAL: > * The join table is a dependency if not read-only. > */ >+ @Override > public boolean hasDependency() { >- return isPrivateOwned() || (!isReadOnly()); >+ return this.isPrivateOwned || (!this.isReadOnly); > } > > /** > * INTERNAL: > * Initialize mappings > */ >+ @Override > public void initialize(AbstractSession session) throws DescriptorException { > super.initialize(session); > getDescriptor().getPreDeleteMappings().add(this); >@@ -467,6 +465,7 @@ > * Verifies listOrderField's table: it must be relation table. > * Precondition: listOrderField != null. 
> */ >+ @Override > protected void buildListOrderField() { > if(this.listOrderField.hasTableName()) { > if(!getRelationTable().equals(this.listOrderField.getTable())) { >@@ -482,6 +481,7 @@ > * INTERNAL: > * Indicates whether getListOrderFieldExpression method should create field expression on table expression. > */ >+ @Override > public boolean shouldUseListOrderFieldTableExpression() { > return true; > } >@@ -703,8 +703,7 @@ > > /** > * INTERNAL: >- * Return whether this mapping was originally defined as a OneToMany >- * @return >+ * Return whether this mapping was originally defined as a OneToMany. > */ > public boolean isDefinedAsOneToManyMapping() { > return isDefinedAsOneToManyMapping; >@@ -714,28 +713,31 @@ > * INTERNAL: > * Return if this mapping support joining. > */ >+ @Override > public boolean isJoiningSupported() { > return true; > } > >- /** >- * INTERNAL: >- */ >+ @Override > public boolean isManyToManyMapping() { > return true; > } >- >+ > /** >- * For Many To Many mappings referenced objects are deleted one by one. >+ * INTERNAL: >+ * Ensure the container policy is post initialized > */ >- protected boolean mustDeleteReferenceObjectsOneByOne() { >- return true; >+ @Override >+ public void postInitialize(AbstractSession session) { >+ super.postInitialize(session); >+ this.mustDeleteReferenceObjectsOneByOne = true; > } > > /** > * INTERNAL: > * An object was added to the collection during an update, insert it if private. > */ >+ @Override > protected void objectAddedDuringUpdate(ObjectLevelModifyQuery query, Object objectAdded, ObjectChangeSet changeSet, Map extraData) throws DatabaseException, OptimisticLockException { > // First insert/update object. > super.objectAddedDuringUpdate(query, objectAdded, changeSet, extraData); >@@ -758,6 +760,7 @@ > * INTERNAL: > * An object was removed to the collection during an update, delete it if private. 
> */ >+ @Override > protected void objectRemovedDuringUpdate(ObjectLevelModifyQuery query, Object objectDeleted, Map extraData) throws DatabaseException, OptimisticLockException { > Object unwrappedObjectDeleted = getContainerPolicy().unwrapIteratorResult(objectDeleted); > AbstractRecord databaseRow = this.mechanism.buildRelationTableSourceAndTargetRow(query.getTranslationRow(), unwrappedObjectDeleted, query.getSession(), this); >@@ -781,6 +784,7 @@ > super.objectRemovedDuringUpdate(query, objectDeleted, extraData); > } > >+ @Override > protected void objectOrderChangedDuringUpdate(WriteObjectQuery query, Object orderChangedObject, int orderIndex) { > prepareTranslationRow(query.getTranslationRow(), query.getObject(), query.getSession()); > AbstractRecord databaseRow = this.mechanism.buildRelationTableSourceAndTargetRow(query.getTranslationRow(), orderChangedObject, query.getSession(), this); >@@ -794,6 +798,7 @@ > * Perform the commit event. > * This is used in the uow to delay data modifications. > */ >+ @Override > public void performDataModificationEvent(Object[] event, AbstractSession session) throws DatabaseException, DescriptorException { > // Hey I might actually want to use an inner class here... ok array for now. > if (event[0] == PostInsert) { >@@ -819,6 +824,7 @@ > * <p>- execute the statement. > * <p>- Repeat above three statements until all the target objects are done. > */ >+ @Override > public void postInsert(WriteObjectQuery query) throws DatabaseException { > insertTargetObjects(query); > // Batch data modification in the uow >@@ -839,6 +845,7 @@ > * Delete entries removed, insert entries added. > * If private also insert/delete/update target objects. 
> */ >+ @Override > public void postUpdate(WriteObjectQuery query) throws DatabaseException { > if (isReadOnly()) { > return; >@@ -865,14 +872,11 @@ > */ > @Override > public void preDelete(DeleteObjectQuery query) throws DatabaseException { >- AbstractSession querySession = query.getSession(); >- if (querySession != null && querySession.isUnitOfWork()){ >- return; >- } >+ AbstractSession session = query.getSession(); > Object objectsIterator = null; > ContainerPolicy containerPolicy = getContainerPolicy(); > >- if (isReadOnly()) { >+ if (this.isReadOnly) { > return; > } > Object objects = null; >@@ -880,12 +884,17 @@ > boolean cascade = shouldObjectModifyCascadeToParts(query); > if (containerPolicy.propagatesEventsToCollection() || cascade) { > // if processed during UnitOfWork commit process the private owned delete will occur during change calculation >- objects = getRealCollectionAttributeValueFromObject(query.getObject(), querySession); >+ objects = getRealCollectionAttributeValueFromObject(query.getObject(), session); > //this must be done up here because the select must be done before the entry in the relation table is deleted. >+ // TODO: Hmm given the below code, the rows are already deleted, so this code is broken. >+ // Assuming it was a cascade remove, it will have been instantiated, so may be ok? > objectsIterator = containerPolicy.iteratorFor(objects); > } >- >+ >+ // This has already been done in a unit of work. >+ if (!session.isUnitOfWork()) { > earlyPreDelete(query); >+ } > > // If privately owned delete the objects, this does not handle removed objects (i.e. verify delete, not req in uow). > // Does not try to optimize delete all like 1-m, (rarely used and hard to do). >@@ -893,14 +902,14 @@ > if (objects != null) { > //objectsIterator will not be null because cascade check will still return true. 
> while (containerPolicy.hasNext(objectsIterator)) { >- Object wrappedObject = containerPolicy.nextEntry(objectsIterator, query.getSession()); >+ Object wrappedObject = containerPolicy.nextEntry(objectsIterator, session); > Object object = containerPolicy.unwrapIteratorResult(wrappedObject); > if (cascade){ > DeleteObjectQuery deleteQuery = new DeleteObjectQuery(); > deleteQuery.setIsExecutionClone(true); > deleteQuery.setObject(object); > deleteQuery.setCascadePolicy(query.getCascadePolicy()); >- query.getSession().executeQuery(deleteQuery); >+ session.executeQuery(deleteQuery); > } > containerPolicy.propogatePreDelete(query, wrappedObject); > } >@@ -912,6 +921,7 @@ > * INTERNAL: > * The translation row may require additional fields than the primary key if the mapping in not on the primary key. > */ >+ @Override > protected void prepareTranslationRow(AbstractRecord translationRow, Object object, AbstractSession session) { > // Make sure that each source key field is in the translation row. > for (Enumeration sourceFieldsEnum = getSourceKeyFields().elements(); >@@ -1036,6 +1046,7 @@ > > /** > * PUBLIC: >+ * Enable history tracking on the m-m join table. > */ > public void setHistoryPolicy(HistoryPolicy policy) { > this.historyPolicy = policy; >@@ -1059,6 +1070,7 @@ > * This can be used by the session broker to override the default session > * to be used for the target class. > */ >+ @Override > public void setSessionName(String name) { > super.setSessionName(name); > this.mechanism.setSessionName(name); >@@ -1156,6 +1168,7 @@ > * INTERNAL: > * Append the temporal selection to the query selection criteria. 
> */ >+ @Override > protected ReadQuery prepareHistoricalQuery(ReadQuery targetQuery, ObjectBuildingQuery sourceQuery, AbstractSession executionSession) { > if (getHistoryPolicy() != null) { > if (targetQuery == getSelectionQuery()) { >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/ObjectReferenceMapping.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/ObjectReferenceMapping.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/ObjectReferenceMapping.java (working copy) >@@ -48,6 +48,7 @@ > * Used during building the backup shallow copy to copy the vector without re-registering the target objects. > * For 1-1 or ref the reference is from the clone so it is already registered. > */ >+ @Override > public Object buildBackupCloneForPartObject(Object attributeValue, Object clone, Object backup, UnitOfWorkImpl unitOfWork) { > return attributeValue; > } >@@ -57,6 +58,7 @@ > * Require for cloning, the part must be cloned. > * Ignore the objects, use the attribute value. > */ >+ @Override > public Object buildCloneForPartObject(Object attributeValue, Object original, Object clone, UnitOfWorkImpl unitOfWork, boolean isExisting) { > if (attributeValue == null) { > return null; >@@ -104,6 +106,7 @@ > * this is a ObjectReference mapping, a recursive call is made to the buildExpressionFromExample method of > * ObjectBuilder. > */ >+ @Override > public Expression buildExpression(Object queryObject, QueryByExamplePolicy policy, Expression expressionBuilder, Map processedObjects, AbstractSession session) { > String attributeName = this.getAttributeName(); > Object attributeValue = this.getRealAttributeValueFromObject(queryObject, session); >@@ -128,6 +131,7 @@ > * Return an ObjectReferenceChangeRecord describing the change, or null if no change. 
> * Used to compute changes for deferred change tracking. > */ >+ @Override > public ChangeRecord compareForChange(Object clone, Object backUp, ObjectChangeSet owner, AbstractSession session) { > Object cloneAttribute = null; > Object backUpAttribute = null; >@@ -198,6 +202,7 @@ > * Used for independent relationships. > * This is used for testing and validation purposes. > */ >+ @Override > protected boolean compareObjectsWithoutPrivateOwned(Object firstObject, Object secondObject, AbstractSession session) { > Object firstReferencedObject = getRealAttributeValueFromObject(firstObject, session); > Object secondReferencedObject = getRealAttributeValueFromObject(secondObject, session); >@@ -229,6 +234,7 @@ > * Used for private relationships. > * This is used for testing and validation purposes. > */ >+ @Override > protected boolean compareObjectsWithPrivateOwned(Object firstObject, Object secondObject, AbstractSession session) { > Object firstPrivateObject = getRealAttributeValueFromObject(firstObject, session); > Object secondPrivateObject = getRealAttributeValueFromObject(secondObject, session); >@@ -245,6 +251,7 @@ > * ObjectReferenceMappings need to unwrap and wrap the > * reference object. > */ >+ @Override > public void fixRealObjectReferences(Object object, Map objectDescriptors, Map processedObjects, ObjectLevelReadQuery query, RemoteSession session) { > //bug 4147755 getRealAttribute... / setReal... > Object attributeValue = getRealAttributeValueFromObject(object, session); >@@ -275,6 +282,7 @@ > * INTERNAL: > * Object reference must unwrap the reference object if required. 
> */ >+ @Override > public Object getRealAttributeValueFromAttribute(Object attributeValue, Object object, AbstractSession session) { > Object value = super.getRealAttributeValueFromAttribute(attributeValue, object, session); > value = getReferenceDescriptor().getObjectBuilder().unwrapObject(value, session); >@@ -286,6 +294,7 @@ > * INTERNAL: > * Related mapping should implement this method to return true. > */ >+ @Override > public boolean isObjectReferenceMapping() { > return true; > } >@@ -295,6 +304,7 @@ > * Iterate on the attribute value. > * The value holder has already been processed. > */ >+ @Override > public void iterateOnRealAttributeValue(DescriptorIterator iterator, Object realAttributeValue) { > // This may be wrapped as the caller in iterate on foreign reference does not unwrap as the type is generic. > Object unwrappedAttributeValue = getReferenceDescriptor().getObjectBuilder().unwrapObject(realAttributeValue, iterator.getSession()); >@@ -305,6 +315,7 @@ > * INTERNAL: > * Merge changes from the source to the target object. Which is the original from the parent UnitOfWork > */ >+ @Override > public void mergeChangesIntoObject(Object target, ChangeRecord changeRecord, Object source, MergeManager mergeManager) { > Object targetValueOfSource = null; > >@@ -368,6 +379,7 @@ > * INTERNAL: > * Merge changes from the source to the target object. 
> */ >+ @Override > public void mergeIntoObject(Object target, boolean isTargetUnInitialized, Object source, MergeManager mergeManager) { > if (isTargetUnInitialized) { > // This will happen if the target object was removed from the cache before the commit was attempted, >@@ -493,6 +505,7 @@ > * INTERNAL: > * Insert privately owned parts > */ >+ @Override > public void preInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { > if (isForeignKeyRelationship()) { > insert(query); >@@ -522,6 +535,7 @@ > * INTERNAL: > * Update privately owned parts > */ >+ @Override > public void preUpdate(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { > if (!isAttributeValueInstantiated(query.getObject())) { > return; >@@ -571,6 +585,7 @@ > * INTERNAL: > * Delete privately owned parts > */ >+ @Override > public void postDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { > // Deletion takes place only if it has privately owned parts and mapping is not read only. 
> if (!shouldObjectModifyCascadeToParts(query)) { >@@ -592,6 +607,9 @@ > if (query.isCascadeOfAggregateDelete()) { > query.getSession().getCommitManager().addObjectToDelete(object); > } else { >+ if (this.isCascadeOnDeleteSetOnDatabase && !hasRelationTableMechanism() && query.getSession().isUnitOfWork()) { >+ ((UnitOfWorkImpl)query.getSession()).getCascadeDeleteObjects().add(object); >+ } > DeleteObjectQuery deleteQuery = new DeleteObjectQuery(); > deleteQuery.setIsExecutionClone(true); > deleteQuery.setObject(object); >@@ -606,6 +624,7 @@ > * INTERNAL: > * Insert privately owned parts > */ >+ @Override > public void postInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { > if (!isForeignKeyRelationship()) { > insert(query); >@@ -616,6 +635,7 @@ > * INTERNAL: > * Update privately owned parts > */ >+ @Override > public void postUpdate(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { > if (!isAttributeValueInstantiated(query.getObject())) { > return; >@@ -663,6 +683,7 @@ > * INTERNAL: > * Delete privately owned parts > */ >+ @Override > public void preDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { > // Deletion takes place according the the cascading policy > if (!shouldObjectModifyCascadeToParts(query)) { >@@ -687,6 +708,9 @@ > } > if ((keyForObjectInMemory == null) || !keyForObjectInDatabase.equals(keyForObjectInMemory)) { > if (objectFromDatabase != null) { >+ if (this.isCascadeOnDeleteSetOnDatabase && !hasRelationTableMechanism() && query.getSession().isUnitOfWork()) { >+ ((UnitOfWorkImpl)query.getSession()).getCascadeDeleteObjects().add(objectFromDatabase); >+ } > DeleteObjectQuery deleteQuery = new DeleteObjectQuery(); > deleteQuery.setIsExecutionClone(true); > deleteQuery.setObject(objectFromDatabase); >@@ -698,6 +722,9 @@ > > if (!isForeignKeyRelationship()) { > if (objectInMemory != null) { >+ if (this.isCascadeOnDeleteSetOnDatabase && !hasRelationTableMechanism() 
&& query.getSession().isUnitOfWork()) { >+ ((UnitOfWorkImpl)query.getSession()).getCascadeDeleteObjects().add(objectFromDatabase); >+ } > DeleteObjectQuery deleteQuery = new DeleteObjectQuery(); > deleteQuery.setIsExecutionClone(true); > deleteQuery.setObject(objectInMemory); >@@ -716,6 +743,7 @@ > * INTERNAL: > * Cascade registerNew for Create through mappings that require the cascade > */ >+ @Override > public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects){ > cascadePerformRemoveIfRequired(object, uow, visitedObjects, true); > } >@@ -726,7 +754,7 @@ > * @param object is either the source object, or attribute value if getAttributeValueFromObject is true. > */ > public void cascadePerformRemoveIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects, boolean getAttributeValueFromObject) { >- if (!isCascadeRemove()) { >+ if (!this.cascadeRemove) { > return; > } > Object attributeValue = null; >@@ -741,6 +769,9 @@ > } > if (attributeValue != null && (! visitedObjects.containsKey(attributeValue)) ){ > visitedObjects.put(attributeValue, attributeValue); >+ if (this.isCascadeOnDeleteSetOnDatabase && !hasRelationTableMechanism()) { >+ uow.getCascadeDeleteObjects().add(attributeValue); >+ } > uow.performRemove(attributeValue, visitedObjects); > } > } >@@ -750,6 +781,7 @@ > * INTERNAL: > * Cascade removal of orphaned private owned objects from the UnitOfWorkChangeSet > */ >+ @Override > public void cascadePerformRemovePrivateOwnedObjectFromChangeSetIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects) { > // if the object is not instantiated, do not instantiate or cascade > Object attributeValue = getAttributeValueFromObject(object); >@@ -767,6 +799,7 @@ > * INTERNAL: > * Cascade discover and persist new objects during commit. 
> */ >+ @Override > public void cascadeDiscoverAndPersistUnregisteredNewObjects(Object object, Map newObjects, Map unregisteredExistingObjects, Map visitedObjects, UnitOfWorkImpl uow) { > cascadeDiscoverAndPersistUnregisteredNewObjects(object, newObjects, unregisteredExistingObjects, visitedObjects, uow, true); > } >@@ -798,6 +831,7 @@ > * INTERNAL: > * Cascade registerNew for Create through mappings that require the cascade > */ >+ @Override > public void cascadeRegisterNewIfRequired(Object object, UnitOfWorkImpl uow, Map visitedObjects){ > cascadeRegisterNewIfRequired(object, uow, visitedObjects, true); > } >@@ -840,6 +874,7 @@ > * INTERNAL: > * The returns if the mapping has any constraint dependencies, such as foreign keys and join tables. > */ >+ @Override > public boolean hasConstraintDependency() { > return isForeignKeyRelationship(); > } >@@ -851,6 +886,7 @@ > * from a row as opposed to building the original from the row, putting it in > * the shared cache, and then cloning the original. > */ >+ @Override > public UnitOfWorkValueHolder createUnitOfWorkValueHolder(ValueHolderInterface attributeValue, Object original, Object clone, AbstractRecord row, UnitOfWorkImpl unitOfWork, boolean buildDirectlyFromRow) { > UnitOfWorkQueryValueHolder valueHolder = null; > if ((row == null) && (isPrimaryKeyMapping())) { >@@ -921,6 +957,7 @@ > * INTERNAL: > * Initialize the state of mapping. > */ >+ @Override > public void preInitialize(AbstractSession session) throws DescriptorException { > super.preInitialize(session); > //Bug#4251902 Make Proxy Indirection writable and readable to deployment xml. If ProxyIndirectionPolicy does not >@@ -1031,6 +1068,14 @@ > > /** > * PUBLIC: >+ * Indicates whether the mapping has RelationTableMechanism. >+ */ >+ public boolean hasRelationTableMechanism() { >+ return false; >+ } >+ >+ /** >+ * PUBLIC: > * Set this mapping to use Proxy Indirection. 
> * > * Proxy Indirection uses the <CODE>Proxy</CODE> and <CODE>InvocationHandler</CODE> features >@@ -1077,6 +1122,7 @@ > * @see ContainerPolicy.buildReferencesPKList() > * @see MappedKeyMapContainerPolicy() > */ >+ @Override > public Object[] buildReferencesPKList(Object entity, Object attribute, AbstractSession session) { > ClassDescriptor referenceDescriptor = getReferenceDescriptor(); > Object target = this.indirectionPolicy.getRealAttributeValueFromObject(entity, attribute); >@@ -1162,6 +1208,7 @@ > * This method is used to load a relationship from a list of PKs. > * This list may be available if the relationship has been cached. > */ >+ @Override > public Object valueFromPKList(Object[] pks, AbstractSession session) { > Object pk = null; > if (pks[0] == null) return null; >@@ -1181,8 +1228,9 @@ > * INTERNAL: > * To verify if the specified object is deleted or not. > */ >+ @Override > public boolean verifyDelete(Object object, AbstractSession session) throws DatabaseException { >- if (isPrivateOwned()) { >+ if (isPrivateOwned() || isCascadeRemove()) { > Object attributeValue = getRealAttributeValueFromObject(object, session); > > if (attributeValue != null) { >@@ -1198,6 +1246,7 @@ > * Get a value from the object and set that in the respective field of the row. > * But before that check if the reference object is instantiated or not. > */ >+ @Override > public void writeFromObjectIntoRowForUpdate(WriteObjectQuery query, AbstractRecord databaseRow) { > Object object = query.getObject(); > AbstractSession session = query.getSession(); >@@ -1219,6 +1268,7 @@ > * INTERNAL: > * Get a value from the object and set that in the respective field of the row. > */ >+ @Override > public void writeFromObjectIntoRowForWhereClause(ObjectLevelModifyQuery query, AbstractRecord databaseRow) { > if (isReadOnly()) { > return; >@@ -1241,6 +1291,7 @@ > * INTERNAL: > * Return if this mapping supports change tracking. 
> */ >+ @Override > public boolean isChangeTrackingSupported(Project project) { > return true; > } >@@ -1250,6 +1301,7 @@ > * Either create a new change record or update the change record with the new value. > * This is used by attribute change tracking. > */ >+ @Override > public void updateChangeRecord(Object clone, Object newValue, Object oldValue, ObjectChangeSet objectChangeSet, UnitOfWorkImpl uow) { > // Must ensure values are unwrapped. > Object unwrappedNewValue = newValue; >@@ -1275,6 +1327,7 @@ > * INTERNAL: > * Directly build a change record without comparison > */ >+ @Override > public ChangeRecord buildChangeRecord(Object clone, ObjectChangeSet owner, AbstractSession session) { > return internalBuildChangeRecord(getRealAttributeValueFromObject(clone, session), owner, session); > } >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/OneToManyMapping.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/OneToManyMapping.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/OneToManyMapping.java (working copy) >@@ -129,6 +129,7 @@ > /** > * INTERNAL: > */ >+ @Override > public boolean isRelationalMapping() { > return true; > } >@@ -239,6 +240,7 @@ > * INTERNAL: > * Clone the appropriate attributes. > */ >+ @Override > public Object clone() { > OneToManyMapping clone = (OneToManyMapping)super.clone(); > clone.setTargetForeignKeysToSourceKeys(new HashMap(getTargetForeignKeysToSourceKeys())); >@@ -263,17 +265,21 @@ > /** > * Delete all the reference objects with a single query. 
> */ >- protected void deleteAll(DeleteObjectQuery query) throws DatabaseException { >- Object attribute = getAttributeAccessor().getAttributeValueFromObject(query.getObject()); >- if(usesIndirection()) { >- if(attribute == null || !getIndirectionPolicy().objectIsInstantiated(attribute)) { >+ protected void deleteAll(DeleteObjectQuery query, AbstractSession session) throws DatabaseException { >+ Object attribute = getAttributeValueFromObject(query.getObject()); >+ if (usesIndirection()) { >+ if (!this.indirectionPolicy.objectIsInstantiated(attribute)) { > // An empty Vector indicates to DeleteAllQuery that no objects should be removed from cache >- ((DeleteAllQuery)getDeleteAllQuery()).executeDeleteAll(query.getSession().getSessionForClass(getReferenceClass()), query.getTranslationRow(), new Vector(0)); >+ ((DeleteAllQuery)this.deleteAllQuery).executeDeleteAll(session.getSessionForClass(this.referenceClass), query.getTranslationRow(), new Vector(0)); > return; > } > } >- Object referenceObjects = this.getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession()); >- ((DeleteAllQuery)getDeleteAllQuery()).executeDeleteAll(query.getSession().getSessionForClass(getReferenceClass()), query.getTranslationRow(), getContainerPolicy().vectorFor(referenceObjects, query.getSession())); >+ Object referenceObjects = getRealCollectionAttributeValueFromObject(query.getObject(), session); >+ // PERF: Avoid delete if empty. >+ if (session.isUnitOfWork() && this.containerPolicy.isEmpty(referenceObjects)) { >+ return; >+ } >+ ((DeleteAllQuery)this.deleteAllQuery).executeDeleteAll(session.getSessionForClass(getReferenceClass()), query.getTranslationRow(), this.containerPolicy.vectorFor(referenceObjects, session)); > } > > /** >@@ -425,6 +431,7 @@ > * Return whether the mapping has any inverse constraint dependencies, > * such as foreign keys and join tables. 
> */ >+ @Override > public boolean hasInverseConstraintDependency() { > return true; > } >@@ -433,6 +440,7 @@ > * INTERNAL: > * Initialize the mapping. > */ >+ @Override > public void initialize(AbstractSession session) throws DescriptorException { > super.initialize(session); > >@@ -567,6 +575,7 @@ > protected void initializeDeleteAllQuery() { > ((DeleteAllQuery)getDeleteAllQuery()).setReferenceClass(getReferenceClass()); > getDeleteAllQuery().setName(getAttributeName()); >+ ((DeleteAllQuery)getDeleteAllQuery()).setIsInMemoryOnly(isCascadeOnDeleteSetOnDatabase()); > if (!hasCustomDeleteAllQuery()) { > // the selection criteria are re-used by the delete all query > if (getSelectionCriteria() == null) { >@@ -774,6 +783,7 @@ > /** > * INTERNAL: > */ >+ @Override > public boolean isOneToManyMapping() { > return true; > } >@@ -790,6 +800,7 @@ > * INTERNAL: > * An object was added to the collection during an update, insert it if private. > */ >+ @Override > protected void objectAddedDuringUpdate(ObjectLevelModifyQuery query, Object objectAdded, ObjectChangeSet changeSet, Map extraData) throws DatabaseException, OptimisticLockException { > // First insert/update object. > super.objectAddedDuringUpdate(query, objectAdded, changeSet, extraData); >@@ -814,6 +825,7 @@ > * INTERNAL: > * An object was removed to the collection during an update, delete it if private. > */ >+ @Override > protected void objectRemovedDuringUpdate(ObjectLevelModifyQuery query, Object objectDeleted, Map extraData) throws DatabaseException, OptimisticLockException { > if(!isPrivateOwned()) { > if (requiresDataModificationEvents() || containerPolicy.requiresDataModificationEvents()){ >@@ -841,6 +853,7 @@ > * Perform the commit event. > * This is used in the uow to delay data modifications. > */ >+ @Override > public void performDataModificationEvent(Object[] event, AbstractSession session) throws DatabaseException, DescriptorException { > // Hey I might actually want to use an inner class here... 
ok array for now. > if (event[0] == PostInsert) { >@@ -858,6 +871,7 @@ > * INTERNAL: > * Insert the reference objects. > */ >+ @Override > public void postInsert(WriteObjectQuery query) throws DatabaseException, OptimisticLockException { > if (isReadOnly()) { > return; >@@ -964,43 +978,51 @@ > @Override > public void preDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { > if (!shouldObjectModifyCascadeToParts(query)) { >- if(this.listOrderField != null) { >+ if (this.listOrderField != null) { > updateTargetRowPreDeleteSource(query); > } > return; > } >+ AbstractSession session = query.getSession(); > >- Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), query.getSession()); >- >- ContainerPolicy cp = getContainerPolicy(); >- >- // if privately-owned parts have their privately-owned sub-parts, delete them one by one; >- // else delete everything in one shot >+ // If privately-owned parts have their privately-owned sub-parts, delete them one by one; >+ // else delete everything in one shot. 
> if (mustDeleteReferenceObjectsOneByOne()) { >- for (Object iter = cp.iteratorFor(objects); cp.hasNext(iter);) { >- Object wrappedObject = cp.nextEntry(iter, query.getSession()); >+ Object objects = getRealCollectionAttributeValueFromObject(query.getObject(), session); >+ ContainerPolicy cp = getContainerPolicy(); >+ if (this.isCascadeOnDeleteSetOnDatabase && session.isUnitOfWork()) { >+ for (Object iterator = cp.iteratorFor(objects); cp.hasNext(iterator);) { >+ Object wrappedObject = cp.nextEntry(iterator, session); >+ Object object = cp.unwrapIteratorResult(wrappedObject); >+ ((UnitOfWorkImpl)session).getCascadeDeleteObjects().add(object); >+ } >+ } >+ int cascade = query.getCascadePolicy(); >+ for (Object iterator = cp.iteratorFor(objects); cp.hasNext(iterator);) { >+ Object wrappedObject = cp.nextEntry(iterator, session); > Object object = cp.unwrapIteratorResult(wrappedObject); > DeleteObjectQuery deleteQuery = new DeleteObjectQuery(); > deleteQuery.setIsExecutionClone(true); > deleteQuery.setObject(object); >- deleteQuery.setCascadePolicy(query.getCascadePolicy()); >- query.getSession().executeQuery(deleteQuery); >- containerPolicy.propogatePreDelete(deleteQuery, wrappedObject); >+ deleteQuery.setCascadePolicy(cascade); >+ session.executeQuery(deleteQuery); >+ this.containerPolicy.propogatePreDelete(deleteQuery, wrappedObject); > } >- if (!query.getSession().isUnitOfWork()) { >+ if (!session.isUnitOfWork()) { > // This deletes any objects on the database, as the collection in memory may have been changed. > // This is not required for unit of work, as the update would have already deleted these objects, > // and the backup copy will include the same objects causing double deletes. > deleteReferenceObjectsLeftOnDatabase(query); > } > } else { >- deleteAll(query); >+ deleteAll(query, session); > } > } > > /** > * Prepare a cascade locking policy. 
> */ >+ @Override > public void prepareCascadeLockingPolicy() { > CascadeLockingPolicy policy = new CascadeLockingPolicy(getDescriptor(), getReferenceDescriptor()); > policy.setQueryKeyFields(getSourceKeysToTargetForeignKeys()); >@@ -1011,7 +1033,6 @@ > * INTERNAL: > * Returns whether this mapping uses data modification events to complete its writes > * @see UnidirectionalOneToManyMapping >- * @return > */ > public boolean requiresDataModificationEvents(){ > return this.listOrderField != null; >@@ -1072,6 +1093,7 @@ > * <p> > * Example: "delete from PHONE where OWNER_ID = #EMPLOYEE_ID" > */ >+ @Override > public void setDeleteAllSQLString(String sqlString) { > DeleteAllQuery query = new DeleteAllQuery(); > query.setSQLString(sqlString); >@@ -1085,6 +1107,7 @@ > * This can be used by the session broker to override the default session > * to be used for the target class. > */ >+ @Override > public void setSessionName(String name) { > super.setSessionName(name); > if (addTargetQuery != null){ >@@ -1196,6 +1219,7 @@ > * should also affect its parts. > * Used by write, insert, update, and delete. > */ >+ @Override > protected boolean shouldObjectModifyCascadeToParts(ObjectLevelModifyQuery query) { > if (isReadOnly()) { > return false; >@@ -1224,6 +1248,7 @@ > * INTERNAL > * Return true if this mapping supports cascaded version optimistic locking. > */ >+ @Override > public boolean isCascadedLockingSupported() { > return true; > } >@@ -1232,6 +1257,7 @@ > * INTERNAL: > * Return if this mapping support joining. > */ >+ @Override > public boolean isJoiningSupported() { > return true; > } >@@ -1275,7 +1301,7 @@ > query.getSession().executeQuery(addTargetQuery, databaseRow); > } > } >- >+ > protected AbstractRecord buildKeyRowForTargetUpdate(ObjectLevelModifyQuery query){ > return new DatabaseRecord(); > } >@@ -1401,9 +1427,10 @@ > * INTERNAL: > * Used to verify whether the specified object is deleted or not. 
> */ >+ @Override > public boolean verifyDelete(Object object, AbstractSession session) throws DatabaseException { >- if (this.isPrivateOwned()) { >- Object objects = this.getRealCollectionAttributeValueFromObject(object, session); >+ if (this.isPrivateOwned() || isCascadeRemove()) { >+ Object objects = getRealCollectionAttributeValueFromObject(object, session); > > ContainerPolicy containerPolicy = getContainerPolicy(); > for (Object iter = containerPolicy.iteratorFor(objects); containerPolicy.hasNext(iter);) { >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/OneToOneMapping.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/OneToOneMapping.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/OneToOneMapping.java (working copy) >@@ -90,6 +90,7 @@ > /** > * INTERNAL: > */ >+ @Override > public boolean isRelationalMapping() { > return true; > } >@@ -205,21 +206,14 @@ > * For mappings used as MapKeys in MappedKeyContainerPolicy. Add the target of this mapping to the deleted > * objects list if necessary > * >- * This method is used for removal of private owned relationships >- * >- * @param object >- * @param manager >+ * This method is used for removal of private owned relationships. > */ > public void addKeyToDeletedObjectsList(Object object, Map deletedObjects){ > deletedObjects.put(object, object); > } > > /** >- * Build a clone of the given element in a unitOfWork >- * @param element >- * @param unitOfWork >- * @param isExisting >- * @return >+ * Build a clone of the given element in a unitOfWork. 
> */ > public Object buildElementClone(Object attributeValue, Object parent, UnitOfWorkImpl unitOfWork, boolean isExisting){ > return buildCloneForPartObject(attributeValue, null, null, unitOfWork, isExisting); >@@ -394,6 +388,7 @@ > * This methods clones all the fields and ensures that each collection refers to > * the same clones. > */ >+ @Override > public Object clone() { > OneToOneMapping clone = (OneToOneMapping)super.clone(); > if(this.mechanism == null) { >@@ -535,6 +530,7 @@ > * INTERNAL: > * Adds locking clause to the target query to extend pessimistic lock scope. > */ >+ @Override > protected void extendPessimisticLockScopeInTargetQuery(ObjectLevelReadQuery targetQuery, ObjectBuildingQuery sourceQuery) { > if(this.mechanism == null) { > super.extendPessimisticLockScopeInTargetQuery(targetQuery, sourceQuery); >@@ -553,6 +549,7 @@ > * This method must be implemented in subclasses that allow > * setting shouldExtendPessimisticLockScopeInSourceQuery to true. > */ >+ @Override > public void extendPessimisticLockScopeInSourceQuery(ObjectLevelReadQuery sourceQuery) { > Expression exp = sourceQuery.getSelectionCriteria(); > if(this.mechanism == null) { >@@ -599,7 +596,6 @@ > /** > * INTERNAL: > * Extract the fields for the Map key from the object to use in a query >- * @return > */ > public Map extractIdentityFieldsForQuery(Object object, AbstractSession session){ > Map keyFields = new HashMap(); >@@ -773,7 +769,6 @@ > /** > * INTERNAL: > * Return any tables that will be required when this mapping is used as part of a join query >- * @return > */ > public List<DatabaseTable> getAdditionalTablesForJoinQuery(){ > List<DatabaseTable> tables = new ArrayList<DatabaseTable>(getReferenceDescriptor().getTables().size() + 1); >@@ -789,6 +784,7 @@ > * Should be overridden by subclass that allows setting > * extendPessimisticLockScope to DEDICATED_QUERY. 
> */ >+ @Override > protected ReadQuery getExtendPessimisticLockScopeDedicatedQuery(AbstractSession session, short lockMode) { > if(this.mechanism != null) { > return this.mechanism.getLockRelationTableQueryClone(session, lockMode); >@@ -802,6 +798,7 @@ > * Return the classification for the field contained in the mapping. > * This is used to convert the row value to a consistent java value. > */ >+ @Override > public Class getFieldClassification(DatabaseField fieldToClassify) throws DescriptorException { > DatabaseField fieldInTarget = getSourceToTargetKeyFields().get(fieldToClassify); > if (fieldInTarget == null) { >@@ -830,7 +827,7 @@ > } > > /** >- * Return the appropriate hashtable that maps the "foreign keys" >+ * Return the appropriate map that maps the "foreign keys" > * to the "primary keys". > */ > protected Map getForeignKeysToPrimaryKeys() { >@@ -868,7 +865,6 @@ > * Return the query that is used when this mapping is part of a joined relationship > * > * This method is used when this mapping is used to map the key in a Map >- * @return > */ > public ObjectLevelReadQuery getNestedJoinQuery(JoinedAttributeManager joinManager, ObjectLevelReadQuery query, AbstractSession session){ > return prepareNestedJoins(joinManager, query, session); >@@ -994,6 +990,7 @@ > * INTERNAL: > * Initialize the mapping. > */ >+ @Override > public void initialize(AbstractSession session) throws DescriptorException { > super.initialize(session); > >@@ -1184,6 +1181,7 @@ > * INTERNAL: > * Prepare a cascade locking policy. > */ >+ @Override > public void prepareCascadeLockingPolicy() { > CascadeLockingPolicy policy = new CascadeLockingPolicy(getDescriptor(), getReferenceDescriptor()); > policy.setQueryKeyFields(getSourceToTargetKeyFields(), ! isForeignKeyRelationship()); >@@ -1206,9 +1204,6 @@ > * Either a parameter can be used for the join or simply the database field > * > * The existing selection criteria can be built upon or a whole new criteria can be built. 
>- * @param useParameter >- * @param usePreviousSelectionCriteria >- * @return > */ > public Expression buildSelectionCriteria(boolean useParameter, boolean usePreviousSelectionCriteria){ > Expression criteria = null; >@@ -1253,6 +1248,7 @@ > * instantiating a working copy clone can be built without placing it in > * the shared cache (no concern over cycles). > */ >+ @Override > public void buildShallowOriginalFromRow(AbstractRecord databaseRow, Object original, JoinedAttributeManager joinManager, ObjectBuildingQuery query, AbstractSession executionSession) { > // Now we are only building this original so we can extract the primary > // key out of it. If the primary key is stored across a 1-1 a value >@@ -1280,6 +1276,7 @@ > /** > * INTERNAL: > */ >+ @Override > public boolean isOneToOneMapping() { > return true; > } >@@ -1287,6 +1284,7 @@ > /** > * INTERNAL: > */ >+ @Override > public boolean isOwned(){ > return this.hasRelationTable() && ! this.isReadOnly; > } >@@ -1295,6 +1293,7 @@ > * INTERNAL: > * Reads the private owned object. > */ >+ @Override > protected Object readPrivateOwnedForObject(ObjectLevelModifyQuery modifyQuery) throws DatabaseException { > if (modifyQuery.getSession().isUnitOfWork()) { > return super.readPrivateOwnedForObject(modifyQuery); >@@ -1315,6 +1314,7 @@ > * This is used to clone descriptors for aggregates, which hammer field names, > * it is probably better not to hammer the field name and this should be refactored. > */ >+ @Override > public void rehashFieldDependancies(AbstractSession session) { > setSourceToTargetKeyFields(Helper.rehashMap(getSourceToTargetKeyFields())); > } >@@ -1467,6 +1467,7 @@ > * INTERNAL > * Return true if this mapping supports cascaded version optimistic locking. > */ >+ @Override > public boolean isCascadedLockingSupported() { > return true; > } >@@ -1475,6 +1476,7 @@ > * INTERNAL: > * Return if this mapping support joining. 
> */ >+ @Override > public boolean isJoiningSupported() { > return true; > } >@@ -1482,8 +1484,6 @@ > /** > * INTERNAL: > * Called when iterating through descriptors to handle iteration on this mapping when it is used as a MapKey >- * @param iterator >- * @param element > */ > public void iterateOnMapKey(DescriptorIterator iterator, Object element){ > this.getIndirectionPolicy().iterateOnAttributeValue(iterator, element); >@@ -1491,24 +1491,16 @@ > > /** > * INTERNAL: >- * Allow the key mapping to unwrap the object >- * @param key >- * @param session >- * @return >- */ >- >+ * Allow the key mapping to unwrap the object. >+ */ > public Object unwrapKey(Object key, AbstractSession session){ > return getDescriptor().getObjectBuilder().unwrapObject(key, session); > } > > /** > * INTERNAL: >- * Allow the key mapping to wrap the object >- * @param key >- * @param session >- * @return >- */ >- >+ * Allow the key mapping to wrap the object. >+ */ > public Object wrapKey(Object key, AbstractSession session){ > return getDescriptor().getObjectBuilder().wrapObject(key, session); > } >@@ -1518,6 +1510,7 @@ > * A subclass should implement this method if it wants different behavior. > * Write the foreign key values from the attribute to the row. > */ >+ @Override > public void writeFromAttributeIntoRow(Object attribute, AbstractRecord row, AbstractSession session) > { > for (Enumeration fieldsEnum = getForeignKeyFields().elements(); fieldsEnum.hasMoreElements();) { >@@ -1536,6 +1529,7 @@ > * INTERNAL: > * Get a value from the object and set that in the respective field of the row. > */ >+ @Override > public Object valueFromObject(Object object, DatabaseField field, AbstractSession session) { > // First check if the value can be obtained from the value holder's row. > Object attributeValue = getAttributeValueFromObject(object); >@@ -1573,6 +1567,7 @@ > * Return the value of the field from the row or a value holder on the query to obtain the object. 
> * Check for batch + aggregation reading. > */ >+ @Override > protected Object valueFromRowInternalWithJoin(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, AbstractSession executionSession) throws DatabaseException { > // PERF: Direct variable access. > Object referenceObject; >@@ -1606,6 +1601,7 @@ > * Return the value of the field from the row or a value holder on the query to obtain the object. > * Check for batch + aggregation reading. > */ >+ @Override > protected Object valueFromRowInternal(AbstractRecord row, JoinedAttributeManager joinManager, ObjectBuildingQuery sourceQuery, AbstractSession executionSession) throws DatabaseException { > // If any field in the foreign key is null then it means there are no referenced objects > // Skip for partial objects as fk may not be present. >@@ -1664,6 +1660,7 @@ > * This row is built for shallow insert which happens in case of bidirectional inserts. > * The foreign keys must be set to null to avoid constraints. > */ >+ @Override > public void writeFromObjectIntoRowForShallowInsert(Object object, AbstractRecord databaseRow, AbstractSession session) { > if (isReadOnly() || (!isForeignKeyRelationship())) { > return; >@@ -1698,6 +1695,7 @@ > * This row is built for shallow insert which happens in case of bidirectional inserts. > * The foreign keys must be set to null to avoid constraints. > */ >+ @Override > public void writeFromObjectIntoRowForShallowInsertWithChangeRecord(ChangeRecord ChangeRecord, AbstractRecord databaseRow, AbstractSession session) { > if (isReadOnly() || (!isForeignKeyRelationship())) { > return; >@@ -1714,6 +1712,7 @@ > * INTERNAL: > * Write fields needed for insert into the template for with null values. 
> */ >+ @Override > public void writeInsertFieldsIntoRow(AbstractRecord databaseRow, AbstractSession session) { > if (isReadOnly() || (!isForeignKeyRelationship())) { > return; >@@ -1777,10 +1776,11 @@ > * INTERNAL: > * Delete privately owned parts > */ >+ @Override > public void preDelete(DeleteObjectQuery query) throws DatabaseException, OptimisticLockException { >- if(this.mechanism != null && !isReadOnly()) { >+ if ((this.mechanism != null) && !this.isReadOnly && !this.isCascadeOnDeleteSetOnDatabase) { > AbstractRecord sourceRow = this.mechanism.buildRelationTableSourceRow(query.getObject(), query.getSession(), this); >- query.getSession().executeQuery(mechanism.deleteQuery, sourceRow); >+ query.getSession().executeQuery(this.mechanism.deleteQuery, sourceRow); > } > super.preDelete(query); > } >@@ -1793,6 +1793,7 @@ > * <p>- Construct a insert statement with above fields and values for relation table. > * <p>- execute the statement. > */ >+ @Override > public void postInsert(WriteObjectQuery query) throws DatabaseException { > super.postInsert(query); > if(this.mechanism != null && !isReadOnly()) { >@@ -1823,6 +1824,7 @@ > * Delete entries removed, insert entries added. > * If private also insert/delete/update target objects. > */ >+ @Override > public void postUpdate(WriteObjectQuery query) throws DatabaseException { > if(this.mechanism == null) { > super.postUpdate(query); >@@ -1867,6 +1869,7 @@ > * Perform the commit event. > * This is used in the uow to delay data modifications. > */ >+ @Override > public void performDataModificationEvent(Object[] event, AbstractSession session) throws DatabaseException, DescriptorException { > // Hey I might actually want to use an inner class here... ok array for now. 
> if (event[0] == setObject) { >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/UnidirectionalOneToManyMapping.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/UnidirectionalOneToManyMapping.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/mappings/UnidirectionalOneToManyMapping.java (working copy) >@@ -216,15 +216,6 @@ > super.preDelete(query); > } else { > updateTargetRowPreDeleteSource(query); >- if (getContainerPolicy().propagatesEventsToCollection()){ >- Object queryObject = query.getObject(); >- Object values = getAttributeValueFromObject(queryObject); >- Object iterator = containerPolicy.iteratorFor(values); >- while (containerPolicy.hasNext(iterator)){ >- Object wrappedObject = containerPolicy.nextEntry(iterator, query.getSession()); >- containerPolicy.propogatePreDelete(query, wrappedObject); >- } >- } > } > > } >@@ -275,7 +266,7 @@ > * Overridden by mappings that require objects to be deleted contribute to change set creation. > */ > @Override >- public void postCalculateChangesOnDeleted(Object deletedObject, UnitOfWorkChangeSet uowChangeSet, UnitOfWorkImpl uow) { >+ public void postCalculateChangesOnDeleted(Object deletedObject, UnitOfWorkChangeSet uowChangeSet, UnitOfWorkImpl uow) { > // the source is deleted: > // trigger the indirection - we have to get optimistic lock exception > // in case another thread has updated one of the targets: >@@ -284,10 +275,10 @@ > // then the version update is executed and it throws optimistic lock exception. 
> Object col = getRealCollectionAttributeValueFromObject(deletedObject, uow); > if (col != null) { >- Object iterator = containerPolicy.iteratorFor(col); >- while (containerPolicy.hasNext(iterator)) { >- Object target = containerPolicy.next(iterator, uow); >- ObjectChangeSet change = referenceDescriptor.getObjectBuilder().createObjectChangeSet(target, uowChangeSet, uow); >+ Object iterator = this.containerPolicy.iteratorFor(col); >+ while (this.containerPolicy.hasNext(iterator)) { >+ Object target = this.containerPolicy.next(iterator, uow); >+ ObjectChangeSet change = this.referenceDescriptor.getObjectBuilder().createObjectChangeSet(target, uowChangeSet, uow); > if (!change.hasChanges()) { > change.setShouldModifyVersionField(Boolean.TRUE); > ((org.eclipse.persistence.internal.sessions.UnitOfWorkChangeSet)change.getUOWChangeSet()).addObjectChangeSet(change, uow, false); >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/queries/DeleteAllQuery.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/queries/DeleteAllQuery.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/queries/DeleteAllQuery.java (working copy) >@@ -24,7 +24,12 @@ > > /** > * <p><b>Purpose</b>: >- * Query used to delete a collection of objects >+ * Query used to delete a collection of objects. >+ * This is used by mappings to delete all of their target objects in a single database call. >+ * The SQL/SQLStatements must be provided. >+ * <p> >+ * DeleteAll can also be used with an Expression (or JPQL) to dynamically delete >+ * a set of objects from the database, and invalidate them in the cache. > * > * <p><b>Responsibilities</b>: > * <ul> >@@ -37,10 +42,16 @@ > */ > public class DeleteAllQuery extends ModifyAllQuery { > >- /* Vector containing objects to be deleted, these should be removed from the identity map after deletion. 
*/ >- protected Vector objects; >+ /** List containing objects to be deleted, these should be removed from the identity map after deletion. */ >+ protected List<Object> objects; > > /** >+ * Defines if objects should be remove from the persistence context only (no database). >+ * This is used if delete was already cascaded by the database. >+ */ >+ protected boolean isInMemoryOnly; >+ >+ /** > * PUBLIC: > */ > public DeleteAllQuery() { >@@ -65,6 +76,24 @@ > } > > /** >+ * INTERNAL: >+ * Return if objects should be remove from the persistence context only (no database). >+ * This is used if delete was already cascaded by the database. >+ */ >+ public boolean isInMemoryOnly() { >+ return isInMemoryOnly; >+ } >+ >+ /** >+ * INTERNAL: >+ * Set if objects should be remove from the persistence context only (no database). >+ * This is used if delete was already cascaded by the database. >+ */ >+ public void setIsInMemoryOnly(boolean isInMemoryOnly) { >+ this.isInMemoryOnly = isInMemoryOnly; >+ } >+ >+ /** > * PUBLIC: > * Return if this is a delete all query. > */ >@@ -78,14 +107,9 @@ > * an exception should be thrown (ObjectLevelModify case), or a transaction > * should be started early and execute on parent if remote (dataModify case). > * A modify query is NEVER executed on the parent, unless remote session. 
>- * @param unitOfWork >- * @param translationRow >- * @return >- * @throws org.eclipse.persistence.essentials.exceptions.DatabaseException >- * @throws org.eclipse.persistence.essentials.exceptions.OptimisticLockException > */ >- public Object executeInUnitOfWork(UnitOfWorkImpl unitOfWork, AbstractRecord translationRow) throws DatabaseException, OptimisticLockException { >- if (getObjects() != null) { >+ public Object executeInUnitOfWork(UnitOfWorkImpl unitOfWork, AbstractRecord translationRow) throws DatabaseException { >+ if (this.objects != null) { > if (unitOfWork.isAfterWriteChangesButBeforeCommit()) { > throw ValidationException.illegalOperationForUnitOfWorkLifecycle(unitOfWork.getLifecycle(), "executeQuery(DeleteAllQuery)"); > } >@@ -110,68 +134,72 @@ > */ > public Object executeDatabaseQuery() throws DatabaseException { > // CR# 4286 >- if (getObjects() != null) { >+ if (this.objects != null) { > >- if(isExpressionQuery() && getSelectionCriteria() == null) { >+ if (isExpressionQuery() && getSelectionCriteria() == null) { > // DeleteAllQuery has objects so it *must* have selectionCriteria, too >- throw QueryException.deleteAllQuerySpecifiesObjectsButNotSelectionCriteria(getDescriptor(), this, getObjects().toString()); >+ throw QueryException.deleteAllQuerySpecifiesObjectsButNotSelectionCriteria(getDescriptor(), this, this.objects.toString()); > } > > // Optimistic lock check not required because objects are deleted individually in that case. > try { >- getSession().beginTransaction(); >+ this.session.beginTransaction(); > > // Need to run pre-delete selector if available. > // PERF: Avoid events if no listeners. 
>- if (getDescriptor().getEventManager().hasAnyEventListeners()) { >- for (Enumeration deletedObjectsEnum = getObjects().elements(); >- deletedObjectsEnum.hasMoreElements();) { >- DescriptorEvent event = new DescriptorEvent(deletedObjectsEnum.nextElement()); >+ if (this.descriptor.getEventManager().hasAnyEventListeners()) { >+ for (Object object : this.objects) { >+ DescriptorEvent event = new DescriptorEvent(object); > event.setEventCode(DescriptorEventManager.PreDeleteEvent); >- event.setSession(getSession()); >+ event.setSession(this.session); > event.setQuery(this); >- getDescriptor().getEventManager().executeEvent(event); >+ this.descriptor.getEventManager().executeEvent(event); > } > } >+ >+ if (this.isInMemoryOnly) { >+ result = Integer.valueOf(0); >+ } else { >+ result = this.queryMechanism.deleteAll(); >+ } > >- result = getQueryMechanism().deleteAll(); >- > // Need to run post-delete selector if available. > // PERF: Avoid events if no listeners. >- if (getDescriptor().getEventManager().hasAnyEventListeners()) { >- for (Enumeration deletedObjectsEnum = getObjects().elements(); >- deletedObjectsEnum.hasMoreElements();) { >- DescriptorEvent event = new DescriptorEvent(deletedObjectsEnum.nextElement()); >+ if (this.descriptor.getEventManager().hasAnyEventListeners()) { >+ for (Object object : this.objects) { >+ DescriptorEvent event = new DescriptorEvent(object); > event.setEventCode(DescriptorEventManager.PostDeleteEvent); >- event.setSession(getSession()); >+ event.setSession(this.session); > event.setQuery(this); >- getDescriptor().getEventManager().executeEvent(event); >+ this.descriptor.getEventManager().executeEvent(event); > } > } > > if (shouldMaintainCache()) { > // remove from the cache. 
>- for (Enumeration objectsEnum = getObjects().elements(); >- objectsEnum.hasMoreElements();) { >- Object deleted = objectsEnum.nextElement(); >- if (getSession().isUnitOfWork()) { >+ for (Object deleted : this.objects) { >+ if (this.session.isUnitOfWork()) { > //BUG #2612169: Unwrap is needed >- deleted = getDescriptor().getObjectBuilder().unwrapObject(deleted, getSession()); >- ((UnitOfWorkImpl)getSession()).addObjectDeletedDuringCommit(deleted, getDescriptor()); >+ deleted = this.descriptor.getObjectBuilder().unwrapObject(deleted, getSession()); >+ ((UnitOfWorkImpl)this.session).addObjectDeletedDuringCommit(deleted, this.descriptor); > } else { >- getSession().getIdentityMapAccessor().removeFromIdentityMap(deleted); >+ this.session.getIdentityMapAccessor().removeFromIdentityMap(deleted); > } > } > } > >- getSession().commitTransaction(); >+ this.session.commitTransaction(); > > } catch (RuntimeException exception) { >- getSession().rollbackTransaction(); >+ this.session.rollbackTransaction(); > throw exception; > } > } else { >- result = getQueryMechanism().deleteAll();// fire the SQL to the database >+ if (this.isInMemoryOnly) { >+ result = Integer.valueOf(0); >+ } else { >+ result = this.queryMechanism.deleteAll();// fire the SQL to the database >+ } > mergeChangesIntoSharedCache(); > } > >@@ -186,7 +214,7 @@ > this.checkPrepare(session, translationRow); > DeleteAllQuery queryToExecute = (DeleteAllQuery)clone(); > >- // Then prapared for the single execution. >+ // Then prepare for the single execution. > queryToExecute.setTranslationRow(translationRow); > queryToExecute.setSession(session); > queryToExecute.setObjects(objects); >@@ -208,7 +236,7 @@ > * PUBLIC: > * Return the objects that are to be deleted > */ >- public Vector getObjects() { >+ public List<Object> getObjects() { > return objects; > } > >@@ -243,23 +271,22 @@ > * Set the objects to be deleted. > * Also REQUIRED is a selection criteria or SQL string that performs the deletion of the objects. 
> * This does not generate the SQL call from the deleted objects. >- * #setObject() should not be called. >- * >- * Vector objects used as an indicator of one of two possible >+ * <p> >+ * List objects used as an indicator of one of two possible > * ways the query may behave: >- * objects != null - the "old" functionality used by OneToMany mapping >+ * <p> objects != null - the "old" functionality used by OneToMany mapping > * objects deleted from the cache, either selection expression or custom sql > * should be provided for deletion from db; >- * objects == null - the "new" functionality (on par with UpdateAllQuery) >+ * <p> objects == null - the "new" functionality (on par with UpdateAllQuery) > * the cache is either left alone or in-memory query finds the cached objects to be deleted, >- * and these objects are invalidated in cache. >- * >- * Note that empty objects is still objects != case. >+ * and these objects are invalidated in cache. >+ * <p> >+ * Note that an empty objects list is still the objects != null case. > * Signal that no cache altering is required. > * Used by AggregationCollectionMapping and OneToManyMapping in case they use indirection >- * and the ValueHolder has not been instantiated. >+ * and the ValueHolder has not been instantiated. 
> */ >- public void setObjects(Vector objectCollection) { >- objects = objectCollection; >+ public void setObjects(List<Object> objectCollection) { >+ this.objects = objectCollection; > } > } >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/queries/DeleteObjectQuery.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/queries/DeleteObjectQuery.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/queries/DeleteObjectQuery.java (working copy) >@@ -133,7 +133,7 @@ > Object object = getObject(); > boolean isUnitOfWork = session.isUnitOfWork(); > try { >- // Check if the object has already been commited, then no work is required >+ // Check if the object has already been committed, then no work is required > if (commitManager.isProcessedCommit(object)) { > return object; > } >@@ -165,7 +165,15 @@ > if (QueryMonitor.shouldMonitor()) { > QueryMonitor.incrementDelete(this); > } >- int rowCount = getQueryMechanism().deleteObject().intValue(); >+ int rowCount = 0; >+ // If the object was/will be deleted from a cascade delete constraint, ignore it. >+ if (isUnitOfWork && ((UnitOfWorkImpl)session).hasCascadeDeleteObjects() >+ && ((UnitOfWorkImpl)session).getCascadeDeleteObjects().contains(object)) { >+ // Cascade delete does not check optimistic lock, assume ok. 
>+ rowCount = 1; >+ } else { >+ rowCount = getQueryMechanism().deleteObject().intValue(); >+ } > > if (rowCount < 1) { > if (session.hasEventManager()) { >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/queries/ModifyAllQuery.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/queries/ModifyAllQuery.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/queries/ModifyAllQuery.java (working copy) >@@ -24,7 +24,7 @@ > > /** > * PUBLIC: >- * Query used to perform a bulk delete using TopLink's expression framework. >+ * Query used to perform a bulk delete using the expression framework. > * > * @author Andrei Ilitchev > * @date August 18, 2005 >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/sessions/factories/ProjectClassGenerator.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/sessions/factories/ProjectClassGenerator.java (revision 8110) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/sessions/factories/ProjectClassGenerator.java (working copy) >@@ -1412,12 +1412,9 @@ > methodDefinition.addLine(""); > > // Sort by name. >- Vector descriptors = buildSortedVectorOfDescriptors(getProject().getOrderedDescriptors()); >+ List<ClassDescriptor> descriptors = buildSortedListOfDescriptors(getProject().getOrderedDescriptors()); > >- for (Enumeration descriptorsEnum = descriptors.elements(); >- descriptorsEnum.hasMoreElements();) { >- ClassDescriptor descriptor = (ClassDescriptor)descriptorsEnum.nextElement(); >- >+ for (ClassDescriptor descriptor : descriptors) { > // Singleton interface descriptors should not exist. 
> if (!(descriptor.isDescriptorForInterface() && (descriptor.getInterfacePolicy().getImplementorDescriptor() != null))) { > methodDefinition.addLine("addDescriptor(build" + getDescriptorMethodNames().get(descriptor) + "ClassDescriptor());"); >@@ -1554,18 +1551,18 @@ > } > > /** >- * Take an unsorted vector of descriptors and sort it so that the order is maintained. >+ * Take an unsorted list of descriptors and sort it so that the order is maintained. > */ >- private Vector buildSortedVectorOfDescriptors(Vector descriptors) { >- Vector returnDescriptors = Helper.addAllUniqueToVector(new Vector(descriptors.size()), descriptors); >+ private List<ClassDescriptor> buildSortedListOfDescriptors(List<ClassDescriptor> descriptors) { >+ List returnDescriptors = Helper.addAllUniqueToList(new ArrayList(descriptors.size()), descriptors); > Object[] descriptorsArray = new Object[returnDescriptors.size()]; > for (int index = 0; index < returnDescriptors.size(); index++) { >- descriptorsArray[index] = returnDescriptors.elementAt(index); >+ descriptorsArray[index] = returnDescriptors.get(index); > } > Arrays.sort(descriptorsArray, new DescriptorCompare()); >- returnDescriptors = new Vector(returnDescriptors.size()); >- for (int index = 0; index < descriptorsArray.length; index++) { >- returnDescriptors.addElement(descriptorsArray[index]); >+ returnDescriptors = new ArrayList(descriptorsArray.length); >+ for (Object descriptor : descriptorsArray) { >+ returnDescriptors.add(descriptor); > } > return returnDescriptors; > } >@@ -1783,10 +1780,7 @@ > classDefinition.addMethod(buildLoginMethod(getProject().getDatasourceLogin())); > } > >- Iterator descriptors = buildSortedVectorOfDescriptors(getProject().getOrderedDescriptors()).iterator(); >- while (descriptors.hasNext()) { >- ClassDescriptor descriptor = (ClassDescriptor)descriptors.next(); >- >+ for (ClassDescriptor descriptor : buildSortedListOfDescriptors(getProject().getOrderedDescriptors())) { > // Singleton interface descriptors 
should not exist. > if (!(descriptor.isDescriptorForInterface() && (descriptor.getInterfacePolicy().getImplementorDescriptor() != null))) { > classDefinition.addMethod(buildDescriptorMethod(descriptor)); >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/sessions/factories/TableCreatorClassGenerator.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/sessions/factories/TableCreatorClassGenerator.java (revision 8110) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/sessions/factories/TableCreatorClassGenerator.java (working copy) >@@ -101,13 +101,11 @@ > method.addLine(foreignKeyName + ".setName(\"" + foreignKey.getName() + "\");"); > method.addLine(foreignKeyName + ".setTargetTable(\"" + foreignKey.getTargetTable() + "\");"); > >- for (Enumeration sourceFieldsEnum = foreignKey.getSourceFields().elements(); >- sourceFieldsEnum.hasMoreElements();) { >- method.addLine(foreignKeyName + ".addSourceField(\"" + sourceFieldsEnum.nextElement() + "\");"); >+ for (String sourceField : foreignKey.getSourceFields()) { >+ method.addLine(foreignKeyName + ".addSourceField(\"" + sourceField + "\");"); > } >- for (Enumeration targetFieldsEnum = foreignKey.getTargetFields().elements(); >- targetFieldsEnum.hasMoreElements();) { >- method.addLine(foreignKeyName + ".addTargetField(\"" + targetFieldsEnum.nextElement() + "\");"); >+ for (String targetField : foreignKey.getTargetFields()) { >+ method.addLine(foreignKeyName + ".addTargetField(\"" + targetField + "\");"); > } > > method.addLine("table.addForeignKeyConstraint(" + foreignKeyName + ");"); >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/sessions/Project.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/sessions/Project.java (revision 8109) >+++ 
foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/sessions/Project.java (working copy) >@@ -52,8 +52,8 @@ > public class Project implements Serializable, Cloneable { > protected String name; > protected Login datasourceLogin; >- protected Map descriptors; >- protected Vector orderedDescriptors; >+ protected Map<Class, ClassDescriptor> descriptors; >+ protected List<ClassDescriptor> orderedDescriptors; > > /** Holds the default set of read-only classes that apply to each UnitOfWork. */ > protected Vector defaultReadOnlyClasses; >@@ -561,7 +561,7 @@ > * Return the descriptors in the order added. > * Used to maintain consistent order in XML. > */ >- public Vector getOrderedDescriptors() { >+ public List<ClassDescriptor> getOrderedDescriptors() { > return orderedDescriptors; > } > >@@ -570,10 +570,9 @@ > * Set the descriptors order. > * Used to maintain consistent order in XML. > */ >- public void setOrderedDescriptors(Vector orderedDescriptors) { >+ public void setOrderedDescriptors(List<ClassDescriptor> orderedDescriptors) { > this.orderedDescriptors = orderedDescriptors; >- for (Enumeration e = orderedDescriptors.elements(); e.hasMoreElements();) { >- ClassDescriptor descriptor = (ClassDescriptor)e.nextElement(); >+ for (ClassDescriptor descriptor : orderedDescriptors) { > String alias = descriptor.getAlias(); > if (alias != null) { > addAlias(alias, descriptor); >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/sessions/server/ServerSession.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/sessions/server/ServerSession.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/sessions/server/ServerSession.java (working copy) >@@ -374,8 +374,6 @@ > if (!isConnected()) { > throw ValidationException.loginBeforeAllocatingClientSessions(); > } >- >- log(SessionLog.FINER, SessionLog.CONNECTION, 
"client_acquired"); > if (!connectionPolicy.isPooled() && (connectionPolicy.getLogin() == null)) { > //the user has passed in a connection policy with no login info. Use the > //default info from the default connection policy >@@ -411,6 +409,9 @@ > if (!connectionPolicy.isLazy()) { > acquireClientConnection(client); > } >+ if (shouldLog(SessionLog.FINER, SessionLog.CONNECTION)) { >+ log(SessionLog.FINER, SessionLog.CONNECTION, "client_acquired", String.valueOf(System.identityHashCode(client))); >+ } > > return client; > } >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/tools/schemaframework/DefaultTableGenerator.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/tools/schemaframework/DefaultTableGenerator.java (revision 8110) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/tools/schemaframework/DefaultTableGenerator.java (working copy) >@@ -15,11 +15,11 @@ > import java.sql.DatabaseMetaData; > import java.sql.ResultSet; > import java.sql.SQLException; >+import java.util.ArrayList; > import java.util.HashMap; > import java.util.Iterator; > import java.util.List; > import java.util.Map; >-import java.util.Vector; > > import org.eclipse.persistence.descriptors.ClassDescriptor; > import org.eclipse.persistence.eis.EISDescriptor; >@@ -100,37 +100,41 @@ > * @since Oracle TopLink 10.1.3 > */ > public class DefaultTableGenerator { >- //the project object used to generate the default data schema. >+ /** The project object used to generate the default data schema. */ > Project project = null; > >- //the target database platform >- private DatabasePlatform databasePlatform; >+ /** the target database platform. 
*/ >+ protected DatabasePlatform databasePlatform; > >- //used to track the table definition: keyed by the table name, and valued >- //by the table definition object >- private Map<String, TableDefinition> tableMap = null; >+ /** >+ * Used to track the table definition: keyed by the table name, and valued >+ * by the table definition object. >+ */ >+ protected Map<String, TableDefinition> tableMap = null; > >- //used to track the field definition: keyed by the database field object, and >- //valued by the field definition. >- private Map<DatabaseField, FieldDefinition> fieldMap = null; >+ /** >+ * Used to track the field definition: keyed by the database field object, and >+ * valued by the field definition. >+ */ >+ protected Map<DatabaseField, FieldDefinition> fieldMap = null; > >- //DatabaseField pool (synchronized with above 'fieldMap') >- private Map<DatabaseField, DatabaseField> databaseFields; >+ /** DatabaseField pool (synchronized with above 'fieldMap') */ >+ protected Map<DatabaseField, DatabaseField> databaseFields; > >- //When this flag is 'false' EclipseLink will not attempt to create fk constraints >+ /** When this flag is 'false' EclipseLink will not attempt to create fk constraints. 
*/ > protected boolean generateFKConstraints = true; > > /** >- * Default construcotr >+ * Default constructor > */ > public DefaultTableGenerator(Project project) { > this.project = project; > if (project.getDatasourceLogin().getDatasourcePlatform() instanceof DatabasePlatform){ >- databasePlatform = (DatabasePlatform)project.getDatasourceLogin().getDatasourcePlatform(); >+ this.databasePlatform = (DatabasePlatform)project.getDatasourceLogin().getDatasourcePlatform(); > } >- tableMap = new HashMap(); >- fieldMap = new HashMap(); >- databaseFields = new HashMap(); >+ this.tableMap = new HashMap(); >+ this.fieldMap = new HashMap(); >+ this.databaseFields = new HashMap(); > } > > /** >@@ -149,12 +153,9 @@ > TableCreator tblCreator = new TableCreator(); > > //go through each descriptor and build the table/field definitions out of mappings >- Iterator descIter = project.getDescriptors().values().iterator(); >+ for (ClassDescriptor descriptor : this.project.getDescriptors().values()) { > >- while (descIter.hasNext()) { >- ClassDescriptor desc = (ClassDescriptor)descIter.next(); >- >- if ((desc instanceof XMLDescriptor) || (desc instanceof EISDescriptor) || (desc instanceof ObjectRelationalDataTypeDescriptor)) { >+ if ((descriptor instanceof XMLDescriptor) || (descriptor instanceof EISDescriptor) || (descriptor instanceof ObjectRelationalDataTypeDescriptor)) { > //default table generator does not support ox, eis and object-relational descriptor > AbstractSessionLog.getLog().log(SessionLog.WARNING, "relational_descriptor_support_only", (Object[])null, true); > >@@ -164,22 +165,18 @@ > // Aggregate descriptors do not contain table/field data and are > // processed through their owning entities. Aggregate descriptors > // can not exist on their own. 
>- if (!desc.isAggregateDescriptor() && !desc.isAggregateCollectionDescriptor()) { >- initTableSchema(desc); >+ if (!descriptor.isAggregateDescriptor() && !descriptor.isAggregateCollectionDescriptor()) { >+ initTableSchema(descriptor); > } > } > > //Post init the schema for relation table and direct collection/map tables, and several special mapping handlings. >- descIter = project.getOrderedDescriptors().iterator(); >- >- while (descIter.hasNext()) { >- ClassDescriptor desc = (ClassDescriptor) descIter.next(); >- >+ for (ClassDescriptor descriptor : this.project.getOrderedDescriptors()) { > // Aggregate descriptors do not contain table/field data and are > // processed through their owning entities. Aggregate descriptors > // can not exist on their own. >- if (!desc.isAggregateDescriptor() && !desc.isAggregateCollectionDescriptor()) { >- postInitTableSchema(desc); >+ if (!descriptor.isAggregateDescriptor() && !descriptor.isAggregateCollectionDescriptor()) { >+ postInitTableSchema(descriptor); > } > } > >@@ -211,7 +208,7 @@ > } > DatabaseMetaData dbMetaData = conn.getMetaData(); > ResultSet resultSet = dbMetaData.getTables(null, dbMetaData.getUserName(), null, new String[] { "TABLE" }); >- java.util.List tablesInDatabase = new java.util.ArrayList(); >+ List tablesInDatabase = new ArrayList(); > > while (resultSet.next()) { > //save all tables from the database >@@ -220,8 +217,8 @@ > > resultSet.close(); > >- java.util.List existedTables = new java.util.ArrayList(); >- java.util.List existedTableNames = new java.util.ArrayList(); >+ List existedTables = new ArrayList(); >+ List existedTableNames = new ArrayList(); > Iterator tblDefIter = tblCreator.getTableDefinitions().iterator(); > > while (tblDefIter.hasNext()) { >@@ -278,8 +275,9 @@ > } > > //build or retrieve the field definition. 
>- FieldDefinition fieldDef = getFieldDefFromDBField(dbField, isPKField); >+ FieldDefinition fieldDef = getFieldDefFromDBField(dbField); > if (isPKField) { >+ fieldDef.setIsPrimaryKey(true); > // Check if the generation strategy is IDENTITY > String sequenceName = descriptor.getSequenceNumberName(); > DatabaseLogin login = this.project.getLogin(); >@@ -306,9 +304,9 @@ > * and direct-collection, direct-map table, as well as reset LOB type for serialized > * object mapping and type conversion mapping for LOB usage > */ >- private void postInitTableSchema(ClassDescriptor desc) { >- for (DatabaseMapping mapping : desc.getMappings()) { >- if (desc.isChildDescriptor() && desc.getInheritancePolicy().getParentDescriptor().getMappingForAttributeName(mapping.getAttributeName()) != null) { >+ protected void postInitTableSchema(ClassDescriptor descriptor) { >+ for (DatabaseMapping mapping : descriptor.getMappings()) { >+ if (descriptor.isChildDescriptor() && descriptor.getInheritancePolicy().getParentDescriptor().getMappingForAttributeName(mapping.getAttributeName()) != null) { > // If we are an inheritance subclass, do nothing. 
That is, don't > // generate mappings that will be generated by our parent, > // otherwise the fields for that mapping will be generated n >@@ -317,7 +315,7 @@ > } else if (mapping.isManyToManyMapping()) { > buildRelationTableDefinition((ManyToManyMapping)mapping, ((ManyToManyMapping)mapping).getRelationTableMechanism(), ((ManyToManyMapping)mapping).getListOrderField(), mapping.getContainerPolicy()); > } else if (mapping.isDirectCollectionMapping()) { >- buildDirectCollectionTableDefinition((DirectCollectionMapping) mapping, desc); >+ buildDirectCollectionTableDefinition((DirectCollectionMapping) mapping, descriptor); > } else if (mapping.isDirectToFieldMapping()) { > Converter converter = ((DirectToFieldMapping)mapping).getConverter(); > if (converter != null) { >@@ -327,7 +325,7 @@ > > if (converter instanceof SerializedObjectConverter) { > //serialized object mapping field should be BLOB/IMAGE >- getFieldDefFromDBField(mapping.getField(), false).setType(Byte[].class); >+ getFieldDefFromDBField(mapping.getField()).setType(Byte[].class); > } > } > } else if (mapping.isAggregateCollectionMapping()) { >@@ -353,7 +351,7 @@ > } > } > >- processAdditionalTablePkFields(desc); >+ processAdditionalTablePkFields(descriptor); > } > > >@@ -361,23 +359,21 @@ > * The ContainerPolicy may contain some additional fields that should be added to the table > * > * @see MappedKeyMapContainerPolicy >- * @param cp >- * @param tblDef > */ >- private void addFieldsForMappedKeyMapContainerPolicy(ContainerPolicy cp, TableDefinition tblDef){ >+ protected void addFieldsForMappedKeyMapContainerPolicy(ContainerPolicy cp, TableDefinition table){ > if (cp.isMappedKeyMapPolicy()){ > List<DatabaseField> keyFields = cp.getIdentityFieldsForMapKey(); > Iterator<DatabaseField> i = keyFields.iterator(); > while (i.hasNext()){ > DatabaseField foreignKey = i.next(); >- FieldDefinition fieldDef = getFieldDefFromDBField(foreignKey, false); >- if (!tblDef.getFields().contains(fieldDef)) { >- 
tblDef.addField(getFieldDefFromDBField(foreignKey, false)); >+ FieldDefinition fieldDef = getFieldDefFromDBField(foreignKey); >+ if (!table.getFields().contains(fieldDef)) { >+ table.addField(getFieldDefFromDBField(foreignKey)); > } > } > Map<DatabaseField, DatabaseField> foreignKeys = ((MappedKeyMapContainerPolicy)cp).getForeignKeyFieldsForMapKey(); > if (foreignKeys != null){ >- addForeignMappingFkConstraint(foreignKeys); >+ addForeignMappingFkConstraint(foreignKeys, false); > } > } > } >@@ -385,50 +381,50 @@ > /** > * Build relation table definitions for all many-to-many relationships in a EclipseLink descriptor. > */ >- private void buildRelationTableDefinition(ForeignReferenceMapping mapping, RelationTableMechanism relationTableMechanism, DatabaseField listOrderField, ContainerPolicy cp) { >+ protected void buildRelationTableDefinition(ForeignReferenceMapping mapping, RelationTableMechanism relationTableMechanism, DatabaseField listOrderField, ContainerPolicy cp) { > //first create relation table >- TableDefinition tblDef = getTableDefFromDBTable(relationTableMechanism.getRelationTable()); >+ TableDefinition table = getTableDefFromDBTable(relationTableMechanism.getRelationTable()); > > //add source foreign key fields into the relation table >- Vector srcFkFields = relationTableMechanism.getSourceRelationKeyFields(); >- Vector srcKeyFields = relationTableMechanism.getSourceKeyFields(); >+ List<DatabaseField> srcFkFields = relationTableMechanism.getSourceRelationKeyFields(); >+ List<DatabaseField> srcKeyFields = relationTableMechanism.getSourceKeyFields(); > >- buildRelationTableFields(mapping, tblDef, srcFkFields, srcKeyFields); >+ buildRelationTableFields(mapping, table, srcFkFields, srcKeyFields); > > //add target foreign key fields into the relation table >- Vector targFkFields = relationTableMechanism.getTargetRelationKeyFields(); >- Vector targKeyFields = relationTableMechanism.getTargetKeyFields(); >+ List<DatabaseField> targFkFields = 
relationTableMechanism.getTargetRelationKeyFields(); >+ List<DatabaseField> targKeyFields = relationTableMechanism.getTargetKeyFields(); > >- buildRelationTableFields(mapping, tblDef, targFkFields, targKeyFields); >+ buildRelationTableFields(mapping, table, targFkFields, targKeyFields); > > if (cp != null){ >- addFieldsForMappedKeyMapContainerPolicy(cp, tblDef); >+ addFieldsForMappedKeyMapContainerPolicy(cp, table); > } > >- if(listOrderField != null) { >- tblDef.addField(getFieldDefFromDBField(listOrderField, false)); >+ if (listOrderField != null) { >+ table.addField(getFieldDefFromDBField(listOrderField)); > } > } > > /** > * Build field definitions and foreign key constraints for all many-to-many relation table. > */ >- private void buildRelationTableFields(ForeignReferenceMapping mapping, TableDefinition tblDef, Vector fkFields, Vector targetFields) { >+ protected void buildRelationTableFields(ForeignReferenceMapping mapping, TableDefinition table, List<DatabaseField> fkFields, List<DatabaseField> targetFields) { > assert fkFields.size() > 0 && fkFields.size() == targetFields.size(); > > DatabaseField fkField = null; > DatabaseField targetField = null; >- Vector<String> fkFieldNames = new Vector(); >- Vector<String> targetFieldNames = new Vector(); >+ List<String> fkFieldNames = new ArrayList(); >+ List<String> targetFieldNames = new ArrayList(); > > for (int index = 0; index < fkFields.size(); index++) { >- fkField = (DatabaseField) fkFields.get(index); >- targetField = (DatabaseField) targetFields.get(index); >+ fkField = fkFields.get(index); >+ targetField = targetFields.get(index); > fkFieldNames.add(fkField.getNameDelimited(databasePlatform)); > targetFieldNames.add(targetField.getNameDelimited(databasePlatform)); > > fkField = resolveDatabaseField(fkField, targetField); >- setFieldToRelationTable(fkField, tblDef); >+ setFieldToRelationTable(fkField, table); > } > > // add a foreign key constraint from fk field to target field >@@ -443,63 +439,78 @@ > && 
mapping.getReferenceDescriptor().getTablePerClassPolicy().hasChild()) { > return; > } >- addForeignKeyConstraint(tblDef, targetTblDef, fkFieldNames, targetFieldNames); >+ addForeignKeyConstraint(table, targetTblDef, fkFieldNames, targetFieldNames, mapping.isCascadeOnDeleteSetOnDatabase()); > } > > /** > * Build direct collection table definitions in a EclipseLink descriptor > */ >- private void buildDirectCollectionTableDefinition(DirectCollectionMapping mapping, ClassDescriptor desc) { >+ protected void buildDirectCollectionTableDefinition(DirectCollectionMapping mapping, ClassDescriptor descriptor) { > //first create direct collection table >- TableDefinition tblDef = getTableDefFromDBTable(mapping.getReferenceTable()); >+ TableDefinition table = getTableDefFromDBTable(mapping.getReferenceTable()); > > DatabaseField dbField = null; >- >- //add the table reference key(s) >- Vector refPkFields = mapping.getReferenceKeyFields(); >- >- for (int index = 0; index < refPkFields.size(); index++) { >- dbField = resolveDatabaseField((DatabaseField) refPkFields.get(index), mapping.getSourceKeyFields().get(index)); >- tblDef.addField(getDirectCollectionReferenceKeyFieldDefFromDBField(dbField)); >+ DatabaseField targetField = null; >+ List<String> fkFieldNames = new ArrayList(); >+ List<String> targetFieldNames = new ArrayList(); >+ List<DatabaseField> fkFields = mapping.getReferenceKeyFields(); >+ List<DatabaseField> targetFields = mapping.getSourceKeyFields(); >+ for (int index = 0; index < fkFields.size(); index++) { >+ DatabaseField fkField = fkFields.get(index); >+ targetField = targetFields.get(index); >+ fkFieldNames.add(fkField.getNameDelimited(databasePlatform)); >+ targetFieldNames.add(targetField.getNameDelimited(databasePlatform)); >+ >+ fkField = resolveDatabaseField(fkField, targetField); >+ table.addField(getFieldDefFromDBField(fkField)); > } >+ >+ // add a foreign key constraint from fk field to target field >+ DatabaseTable targetTable = 
targetField.getTable(); >+ TableDefinition targetTblDef = getTableDefFromDBTable(targetTable); > > //add the direct collection field to the table. >- tblDef.addField(getFieldDefFromDBField(mapping.getDirectField(), false)); >+ table.addField(getFieldDefFromDBField(mapping.getDirectField())); > > //if the mapping is direct-map field, add the direct key field to the table as well. > // TODO: avoid generating DDL for map key mappings for the time being. > // Bug: 270814 > if (mapping.isDirectMapMapping() && ! mapping.getContainerPolicy().isMappedKeyMapPolicy() ) { > dbField = ((DirectMapMapping) mapping).getDirectKeyField(); >- tblDef.addField(getFieldDefFromDBField(dbField, false)); >+ table.addField(getFieldDefFromDBField(dbField)); > } else { >- addFieldsForMappedKeyMapContainerPolicy(mapping.getContainerPolicy(), tblDef); >+ addFieldsForMappedKeyMapContainerPolicy(mapping.getContainerPolicy(), table); > >- if(mapping.getListOrderField() != null) { >- tblDef.addField(getFieldDefFromDBField(mapping.getListOrderField(), false)); >+ if (mapping.getListOrderField() != null) { >+ table.addField(getFieldDefFromDBField(mapping.getListOrderField())); > } > } >+ if (mapping.getDescriptor().hasTablePerClassPolicy() >+ && mapping.getDescriptor().getTablePerClassPolicy().hasChild()) { >+ return; >+ } >+ addForeignKeyConstraint(table, targetTblDef, fkFieldNames, targetFieldNames, mapping.isCascadeOnDeleteSetOnDatabase()); > } > > /** > * Reset field type to use BLOB/CLOB with type conversion mapping fix for 4k oracle thin driver bug. > */ >- private void resetFieldTypeForLOB(DirectToFieldMapping mapping) { >+ protected void resetFieldTypeForLOB(DirectToFieldMapping mapping) { > if (mapping.getFieldClassification().getName().equals("java.sql.Blob")) { > //allow the platform to figure out what database field type gonna be used. > //For example, Oracle9 will generate BLOB type, SQL Server generats IMAGE. 
>- getFieldDefFromDBField(mapping.getField(), false).setType(Byte[].class); >+ getFieldDefFromDBField(mapping.getField()).setType(Byte[].class); > } else if (mapping.getFieldClassification().getName().equals("java.sql.Clob")) { > //allow the platform to figure out what database field type gonna be used. > //For example, Oracle9 will generate CLOB type. SQL Server generats TEXT. >- getFieldDefFromDBField(mapping.getField(), false).setType(Character[].class); >+ getFieldDefFromDBField(mapping.getField()).setType(Character[].class); > } > } > > /** > * Reset the transformation mapping field types > */ >- private void resetTransformedFieldType(TransformationMapping mapping) { >+ protected void resetTransformedFieldType(TransformationMapping mapping) { > Iterator transIter = mapping.getFieldTransformations().iterator(); > while (transIter.hasNext()) { > FieldTransformation transformation = (FieldTransformation) transIter.next(); >@@ -508,7 +519,7 @@ > MethodBasedFieldTransformation methodTransformation = (MethodBasedFieldTransformation) transformation; > try { > Class returnType = Helper.getDeclaredMethod(mapping.getDescriptor().getJavaClass(), methodTransformation.getMethodName(), null).getReturnType(); >- getFieldDefFromDBField(methodTransformation.getField(), false).setType(returnType); >+ getFieldDefFromDBField(methodTransformation.getField()).setType(returnType); > } catch (NoSuchMethodException ex) { > // For some reason, the method type could not be retrieved, > // use the default java.lang.String type >@@ -529,7 +540,7 @@ > throw ValidationException.missingFieldTypeForDDLGenerationOfClassTransformation(mapping.getDescriptor(), mapping.getAttributeName(), methodName); > } > >- getFieldDefFromDBField(classTransformation.getField(), false).setType(returnType); >+ getFieldDefFromDBField(classTransformation.getField()).setType(returnType); > } catch (NoSuchMethodException ex) { > // For some reason, the method type could not be retrieved. 
> // Did the interface method change? Throw an exception. >@@ -543,74 +554,112 @@ > * Add the foreign key to the aggregate collection mapping target table. > * Also add listOrderField if specified. > */ >- private void createAggregateTargetTable(AggregateCollectionMapping mapping) { >- TableDefinition targTblDef = getTableDefFromDBTable(mapping.getReferenceDescriptor().getDefaultTable()); >- addFieldsForMappedKeyMapContainerPolicy(mapping.getContainerPolicy(), targTblDef); >+ protected void createAggregateTargetTable(AggregateCollectionMapping mapping) { >+ TableDefinition targetTable = getTableDefFromDBTable(mapping.getReferenceDescriptor().getDefaultTable()); >+ addFieldsForMappedKeyMapContainerPolicy(mapping.getContainerPolicy(), targetTable); > > Iterator aggregateFieldIterator = mapping.getReferenceDescriptor().getFields().iterator(); > while (aggregateFieldIterator.hasNext()) { > DatabaseField dbField = (DatabaseField) aggregateFieldIterator.next(); >- //add the target foreign key field definition to the table definition >- targTblDef.addField(getFieldDefFromDBField(dbField, false)); >+ //add the target definition to the table definition >+ targetTable.addField(getFieldDefFromDBField(dbField)); > } > > //unlike normal one-to-many mapping, aggregate collection mapping does not have 1:1 back reference > //mapping, so the target foreign key fields are not stored in the target descriptor. 
>- Iterator targFKIter = mapping.getTargetForeignKeyFields().iterator(); >- >- while (targFKIter.hasNext()) { >- DatabaseField dbField = (DatabaseField) targFKIter.next(); >- //add the target foreign key field definition to the table definition >- targTblDef.addField(getFieldDefFromDBField(dbField, false)); >+ List<String> fkFieldNames = new ArrayList(); >+ List<String> targetFieldNames = new ArrayList(); >+ List<DatabaseField> fkFields = mapping.getTargetForeignKeyFields(); >+ List<DatabaseField> targetFields = mapping.getSourceKeyFields(); >+ DatabaseField targetField = null; >+ for (int index = 0; index < fkFields.size(); index++) { >+ DatabaseField fkField = fkFields.get(index); >+ targetField = targetFields.get(index); >+ fkFieldNames.add(fkField.getNameDelimited(databasePlatform)); >+ targetFieldNames.add(targetField.getNameDelimited(databasePlatform)); >+ >+ fkField = resolveDatabaseField(fkField, targetField); >+ targetTable.addField(getFieldDefFromDBField(fkField)); > } >- >- if(mapping.getListOrderField() != null) { >- getTableDefFromDBTable(mapping.getListOrderField().getTable()).addField(getFieldDefFromDBField(mapping.getListOrderField(), false)); >+ >+ // add a foreign key constraint from fk field to target field >+ DatabaseTable sourceDatabaseTable = targetField.getTable(); >+ TableDefinition sourceTable = getTableDefFromDBTable(sourceDatabaseTable); >+ >+ if (mapping.getListOrderField() != null) { >+ getTableDefFromDBTable(mapping.getListOrderField().getTable()).addField(getFieldDefFromDBField(mapping.getListOrderField())); > } >+ if (mapping.getDescriptor().hasTablePerClassPolicy() >+ && mapping.getDescriptor().getTablePerClassPolicy().hasChild()) { >+ return; >+ } >+ addForeignKeyConstraint(targetTable, sourceTable, fkFieldNames, targetFieldNames, mapping.isCascadeOnDeleteSetOnDatabase()); > } > >- private void addForeignKeyFieldToSourceTargetTable(OneToOneMapping mapping) { >+ protected void addForeignKeyFieldToSourceTargetTable(OneToOneMapping 
mapping) { > if (!mapping.isForeignKeyRelationship() > || (mapping.getReferenceDescriptor().hasTablePerClassPolicy() > && mapping.getReferenceDescriptor().getTablePerClassPolicy().hasChild())) { > return; > } >- >- addForeignMappingFkConstraint(mapping.getSourceToTargetKeyFields()); >+ boolean cascadeDelete = false; >+ // Find mappedBy target mapping to check constraint cascade. >+ for (DatabaseField foreignKey : mapping.getSourceToTargetKeyFields().values()) { >+ DatabaseMapping mappedBy = mapping.getReferenceDescriptor().getObjectBuilder().getMappingForField(foreignKey); >+ if (mappedBy.isOneToOneMapping()) { >+ cascadeDelete = ((OneToOneMapping)mappedBy).isCascadeOnDeleteSetOnDatabase(); >+ } else { >+ List<DatabaseMapping> readOnlyMappings = mapping.getReferenceDescriptor().getObjectBuilder().getReadOnlyMappingsForField(foreignKey); >+ if (readOnlyMappings != null) { >+ for (DatabaseMapping mappedByPK : readOnlyMappings) { >+ if (mappedByPK.isOneToOneMapping()) { >+ cascadeDelete = ((OneToOneMapping)mappedByPK).isCascadeOnDeleteSetOnDatabase(); >+ if (cascadeDelete) { >+ break; >+ } >+ } >+ } >+ } >+ } >+ if (cascadeDelete) { >+ break; >+ } >+ } >+ addForeignMappingFkConstraint(mapping.getSourceToTargetKeyFields(), cascadeDelete); > } > >- private void addForeignKeyFieldToSourceTargetTable(OneToManyMapping mapping) { >+ protected void addForeignKeyFieldToSourceTargetTable(OneToManyMapping mapping) { > if (mapping.getDescriptor().hasTablePerClassPolicy() > && mapping.getDescriptor().getTablePerClassPolicy().hasChild()) { > return; > } >- addForeignMappingFkConstraint(mapping.getTargetForeignKeysToSourceKeys()); >+ addForeignMappingFkConstraint(mapping.getTargetForeignKeysToSourceKeys(), mapping.isCascadeOnDeleteSetOnDatabase()); > if(mapping.getListOrderField() != null) { >- getTableDefFromDBTable(mapping.getListOrderField().getTable()).addField(getFieldDefFromDBField(mapping.getListOrderField(), false)); >+ 
getTableDefFromDBTable(mapping.getListOrderField().getTable()).addField(getFieldDefFromDBField(mapping.getListOrderField())); > } > } > >- private void addForeignMappingFkConstraint(final Map<DatabaseField, DatabaseField> srcFields) { >+ protected void addForeignMappingFkConstraint(final Map<DatabaseField, DatabaseField> srcFields, boolean cascadeOnDelete) { > // srcFields map from the foreign key field to the target key field > > if(srcFields.size() == 0) { > return; > } > >- List<DatabaseField> fkFields = new Vector<DatabaseField>(); >- List<DatabaseField> targetFields = new Vector<DatabaseField>(); >+ List<DatabaseField> fkFields = new ArrayList<DatabaseField>(); >+ List<DatabaseField> targetFields = new ArrayList<DatabaseField>(); > > for (DatabaseField fkField : srcFields.keySet()) { > fkFields.add(fkField); > targetFields.add(srcFields.get(fkField)); > } >- addJoinColumnsFkConstraint(fkFields, targetFields); >+ addJoinColumnsFkConstraint(fkFields, targetFields, cascadeOnDelete); > } > > /** > * Build a table definition object from a database table object > */ >- private TableDefinition getTableDefFromDBTable(DatabaseTable databaseTable) { >+ protected TableDefinition getTableDefFromDBTable(DatabaseTable databaseTable) { > TableDefinition tableDefinition = this.tableMap.get(databaseTable.getName()); > > if (tableDefinition == null) { >@@ -634,7 +683,7 @@ > * Resolve the foreign key database field metadata in relation table or direct collection/map table. > * Those metadata includes type, and maybe dbtype/size/subsize if DatabaseField carries those info. > */ >- private DatabaseField resolveDatabaseField(DatabaseField childField, DatabaseField parentField) { >+ protected DatabaseField resolveDatabaseField(DatabaseField childField, DatabaseField parentField) { > //set through the type from the source table key field to the relation or direct collection table key field. 
> DatabaseField resolvedDatabaseField = new DatabaseField(); > // find original field in the parent table, which contains actual type definitions >@@ -679,7 +728,7 @@ > /** > * Build a field definition object from a database field. > */ >- private FieldDefinition getFieldDefFromDBField(DatabaseField dbField, boolean isPrimaryKey) { >+ protected FieldDefinition getFieldDefFromDBField(DatabaseField dbField) { > FieldDefinition fieldDef = this.fieldMap.get(dbField); > > if (fieldDef == null) { >@@ -723,65 +772,53 @@ > fieldDef.setShouldAllowNull(dbField.isNullable()); > fieldDef.setUnique(dbField.isUnique()); > } >- >- fieldDef.setIsPrimaryKey(isPrimaryKey); >- fieldMap.put(dbField, fieldDef); >- databaseFields.put(dbField, dbField); >+ this.fieldMap.put(dbField, fieldDef); >+ this.databaseFields.put(dbField, dbField); > } > > return fieldDef; > } >- >- /** >- * Build a field definition object from a database field. >- */ >- private FieldDefinition getDirectCollectionReferenceKeyFieldDefFromDBField(DatabaseField dbField) { >- FieldDefinition fieldDef = (FieldDefinition)getFieldDefFromDBField(dbField, true).clone(); >- //direct collection/map table reference kye filed is not unique, need to set it as non-pk. >- fieldDef.setIsPrimaryKey(false); >- return fieldDef; >- } > > /** > * Build and add a field definition object to relation table > */ >- private void setFieldToRelationTable(DatabaseField dbField, TableDefinition tblDef) { >- FieldDefinition fieldDef = getFieldDefFromDBField(dbField, false); >+ protected void setFieldToRelationTable(DatabaseField dbField, TableDefinition table) { >+ FieldDefinition fieldDef = getFieldDefFromDBField(dbField); > >- if (!tblDef.getFields().contains(fieldDef)) { >+ if (!table.getFields().contains(fieldDef)) { > //only add the field once, to avoid add twice if m:m is bi-directional. 
>- tblDef.addField(getFieldDefFromDBField(dbField, false)); >+ table.addField(getFieldDefFromDBField(dbField)); > fieldDef.setIsPrimaryKey(true); // make this a PK as we will be creating constrains later > } > } > >- private void processAdditionalTablePkFields(ClassDescriptor desc) { >+ protected void processAdditionalTablePkFields(ClassDescriptor descriptor) { > // only if there are additional tables >- if (!desc.hasMultipleTables()) { >+ if (!descriptor.hasMultipleTables()) { > return; > } > >- DatabaseTable dbTbl = null; >- Iterator dbTblIter = desc.getTables().iterator(); >+ DatabaseTable databaseTable = null; >+ Iterator dbTblIter = descriptor.getTables().iterator(); > while (dbTblIter.hasNext()) { >- dbTbl = (DatabaseTable) dbTblIter.next(); >- Map<DatabaseField, DatabaseField> srcFields = desc.getAdditionalTablePrimaryKeyFields().get(dbTbl); >+ databaseTable = (DatabaseTable) dbTblIter.next(); >+ Map<DatabaseField, DatabaseField> srcFields = descriptor.getAdditionalTablePrimaryKeyFields().get(databaseTable); > if ((null != srcFields) && srcFields.size() > 0) { > // srcFields is from the secondary field to the primary key field > // Let's make fk constraint from the secondary field to the primary key field >- List<DatabaseField> fkFields = new Vector<DatabaseField>(); >- List<DatabaseField> pkFields = new Vector<DatabaseField>(); >+ List<DatabaseField> fkFields = new ArrayList<DatabaseField>(); >+ List<DatabaseField> pkFields = new ArrayList<DatabaseField>(); > > for (DatabaseField pkField : srcFields.keySet()) { > pkFields.add(pkField); > fkFields.add(srcFields.get(pkField)); > } >- addJoinColumnsFkConstraint(fkFields, pkFields); >+ addJoinColumnsFkConstraint(fkFields, pkFields, descriptor.isCascadeOnDeleteSetOnDatabaseOnSecondaryTables()); > } > } > } > >- private void addJoinColumnsFkConstraint(List<DatabaseField> fkFields, List<DatabaseField> targetFields) { >+ protected void addJoinColumnsFkConstraint(List<DatabaseField> fkFields, List<DatabaseField> 
targetFields, boolean cascadeOnDelete) { > assert fkFields.size() == targetFields.size(); > > if (fkFields.size() == 0) { >@@ -790,8 +827,8 @@ > > DatabaseField fkField = null; > DatabaseField targetField = null; >- Vector<String> fkFieldNames = new Vector(); >- Vector<String> targetFieldNames = new Vector(); >+ List<String> fkFieldNames = new ArrayList(); >+ List<String> targetFieldNames = new ArrayList(); > > DatabaseTable sourceTable = fkFields.get(0).getTable(); > TableDefinition sourceTableDef = getTableDefFromDBTable(sourceTable); >@@ -808,7 +845,7 @@ > if (targetFieldDef != null) { > // UnidirectionalOneToOneMapping case > if (fkFieldDef == null) { >- fkFieldDef = getFieldDefFromDBField(fkField, false); >+ fkFieldDef = getFieldDefFromDBField(fkField); > if (!sourceTableDef.getFields().contains(fkFieldDef)) { > sourceTableDef.addField(fkFieldDef); > } >@@ -827,14 +864,14 @@ > DatabaseTable targetTable = targetField.getTable(); > TableDefinition targetTableDef = getTableDefFromDBTable(targetTable); > >- addForeignKeyConstraint(sourceTableDef, targetTableDef, fkFieldNames, targetFieldNames); >+ addForeignKeyConstraint(sourceTableDef, targetTableDef, fkFieldNames, targetFieldNames, cascadeOnDelete); > } > > /** > * Add a foreign key constraint to the source table. > */ >- private void addForeignKeyConstraint(TableDefinition sourceTableDef, TableDefinition targetTableDef, >- Vector<String> fkFields, Vector<String> targetFields) { >+ protected void addForeignKeyConstraint(TableDefinition sourceTableDef, TableDefinition targetTableDef, >+ List<String> fkFields, List<String> targetFields, boolean cascadeOnDelete) { > > // Only generate FK constraints if instructed to > if (! 
this.generateFKConstraints){ >@@ -844,8 +881,8 @@ > > // target keys could be primary keys or candidate(unique) keys of the target table > >- Vector<String> fkFieldNames = fkFields; >- Vector<String> targetFieldNames = targetFields; >+ List<String> fkFieldNames = fkFields; >+ List<String> targetFieldNames = targetFields; > > if (fkFields.size() > 1) { > // if composite key, we should consider the order of keys. >@@ -871,8 +908,8 @@ > targetToFkField.put(targetField, fkFields.get(index)); > } > >- Vector<String> orderedFkFields = new Vector<String>(fkFields.size()); >- Vector<String> orderedTargetFields = new Vector<String>(targetFields.size()); >+ List<String> orderedFkFields = new ArrayList<String>(fkFields.size()); >+ List<String> orderedTargetFields = new ArrayList<String>(targetFields.size()); > > if (!error) { > // if target fields are primary keys >@@ -892,8 +929,8 @@ > if (!error && !resolved) { > // if target fields are unique keys > for (UniqueKeyConstraint uniqueConstraint : targetTableDef.getUniqueKeys()) { >- orderedFkFields.setSize(0); >- orderedTargetFields.setSize(0); >+ orderedFkFields.clear(); >+ orderedTargetFields.clear(); > > resolved = true; > for (String ukField : uniqueConstraint.getSourceFields()) { >@@ -921,12 +958,13 @@ > // For bidirectional relationships both side of mapping will make the same FK constraint twice. > // TableDefinition.addForeignKeyConstraint() will ignore the same FK constraint. 
> >- ForeignKeyConstraint fkc = sourceTableDef.buildForeignKeyConstraint(fkFieldNames, targetFieldNames, >- targetTableDef, databasePlatform); >- sourceTableDef.addForeignKeyConstraint(fkc); >+ ForeignKeyConstraint constraint = sourceTableDef.buildForeignKeyConstraint(fkFieldNames, targetFieldNames, >+ targetTableDef, this.databasePlatform); >+ constraint.setShouldCascadeOnDelete(cascadeOnDelete); >+ sourceTableDef.addForeignKeyConstraint(constraint); > } > >- private void addUniqueKeyConstraints(TableDefinition sourceTableDef, Map<String, List<List<String>>> uniqueConstraintsMap) { >+ protected void addUniqueKeyConstraints(TableDefinition sourceTableDef, Map<String, List<List<String>>> uniqueConstraintsMap) { > int serialNumber = -1; > > for (String name : uniqueConstraintsMap.keySet()) { >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/tools/schemaframework/ForeignKeyConstraint.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/tools/schemaframework/ForeignKeyConstraint.java (revision 8109) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/tools/schemaframework/ForeignKeyConstraint.java (working copy) >@@ -24,15 +24,15 @@ > */ > public class ForeignKeyConstraint implements Serializable { > protected String name; >- protected Vector sourceFields; >- protected Vector targetFields; >+ protected List<String> sourceFields; >+ protected List<String> targetFields; > protected String targetTable; > protected boolean shouldCascadeOnDelete; > > public ForeignKeyConstraint() { > this.name = ""; >- this.sourceFields = new Vector(); >- this.targetFields = new Vector(); >+ this.sourceFields = new ArrayList(); >+ this.targetFields = new ArrayList(); > this.targetTable = ""; > this.shouldCascadeOnDelete = false; > } >@@ -40,17 +40,17 @@ > public ForeignKeyConstraint(String name, String sourceField, String targetField, String 
targetTable) { > this(); > this.name = name; >- sourceFields.addElement(sourceField); >- targetFields.addElement(targetField); >+ sourceFields.add(sourceField); >+ targetFields.add(targetField); > this.targetTable = targetTable; > } > > public void addSourceField(String sourceField) { >- getSourceFields().addElement(sourceField); >+ getSourceFields().add(sourceField); > } > > public void addTargetField(String targetField) { >- getTargetFields().addElement(targetField); >+ getTargetFields().add(targetField); > } > > /** >@@ -60,20 +60,18 @@ > public void appendDBString(Writer writer, AbstractSession session) { > try { > writer.write("FOREIGN KEY ("); >- for (Enumeration sourceEnum = getSourceFields().elements(); >- sourceEnum.hasMoreElements();) { >- writer.write((String)sourceEnum.nextElement()); >- if (sourceEnum.hasMoreElements()) { >+ for (Iterator iterator = getSourceFields().iterator(); iterator.hasNext();) { >+ writer.write((String)iterator.next()); >+ if (iterator.hasNext()) { > writer.write(", "); > } > } > writer.write(") REFERENCES "); > writer.write(getTargetTable()); > writer.write(" ("); >- for (Enumeration targetEnum = getTargetFields().elements(); >- targetEnum.hasMoreElements();) { >- writer.write((String)targetEnum.nextElement()); >- if (targetEnum.hasMoreElements()) { >+ for (Iterator iterator = getTargetFields().iterator(); iterator.hasNext();) { >+ writer.write((String)iterator.next()); >+ if (iterator.hasNext()) { > writer.write(", "); > } > } >@@ -107,11 +105,11 @@ > return name; > } > >- public Vector getSourceFields() { >+ public List<String> getSourceFields() { > return sourceFields; > } > >- public Vector getTargetFields() { >+ public List<String> getTargetFields() { > return targetFields; > } > >@@ -132,11 +130,11 @@ > this.shouldCascadeOnDelete = shouldCascadeOnDelete; > } > >- public void setSourceFields(Vector sourceFields) { >+ public void setSourceFields(List<String> sourceFields) { > this.sourceFields = sourceFields; > } > >- public 
void setTargetFields(Vector targetFields) { >+ public void setTargetFields(List<String> targetFields) { > this.targetFields = targetFields; > } > >Index: foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/tools/schemaframework/TableDefinition.java >=================================================================== >--- foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/tools/schemaframework/TableDefinition.java (revision 8110) >+++ foundation/org.eclipse.persistence.core/src/org/eclipse/persistence/tools/schemaframework/TableDefinition.java (working copy) >@@ -444,7 +444,7 @@ > /** > * Build a foreign key constraint. > */ >- protected ForeignKeyConstraint buildForeignKeyConstraint(List fkFieldNames, List pkFieldNames, TableDefinition targetTable, DatabasePlatform platform) { >+ protected ForeignKeyConstraint buildForeignKeyConstraint(List<String> fkFieldNames, List<String> pkFieldNames, TableDefinition targetTable, DatabasePlatform platform) { > assert fkFieldNames.size() > 0 && fkFieldNames.size() == pkFieldNames.size(); > > ForeignKeyConstraint fkConstraint = new ForeignKeyConstraint(); >@@ -454,7 +454,7 @@ > } > > fkConstraint.setTargetTable(targetTable.getFullName()); >- String fkFieldName = (String)fkFieldNames.get(0); >+ String fkFieldName = fkFieldNames.get(0); > String name = buildForeignKeyConstraintName(this.getName(), fkFieldName, platform.getMaxForeignKeyNameSize(), platform); > > fkConstraint.setName(name); >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/advanced/Man.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/advanced/Man.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/advanced/Man.java (working copy) >@@ -14,6 +14,7 @@ > > import javax.persistence.*; > >+import static javax.persistence.CascadeType.ALL; > import static 
javax.persistence.GenerationType.*; > > @Entity >@@ -23,48 +24,49 @@ > private String lastName; > private PartnerLink partnerLink; > >- public Man() {} >- >+ public Man() { >+ } >+ > public Man(String firstName, String lastName) { > this(); > this.firstName = firstName; > this.lastName = lastName; > } >- >+ > @Id >- @GeneratedValue(strategy=IDENTITY) >- public int getId() { >- return id; >+ @GeneratedValue(strategy = IDENTITY) >+ public int getId() { >+ return id; > } > >- @Column(name="F_NAME") >- public String getFirstName() { >- return firstName; >+ @Column(name = "F_NAME") >+ public String getFirstName() { >+ return firstName; > } >- >- public void setFirstName(String name) { >- this.firstName = name; >+ >+ public void setFirstName(String name) { >+ this.firstName = name; > } > >- @Column(name="L_NAME") >- public String getLastName() { >- return lastName; >+ @Column(name = "L_NAME") >+ public String getLastName() { >+ return lastName; > } >- >- public void setLastName(String name) { >- this.lastName = name; >+ >+ public void setLastName(String name) { >+ this.lastName = name; > } >- >- @OneToOne(mappedBy="man") >- public PartnerLink getPartnerLink() { >- return partnerLink; >+ >+ @OneToOne(mappedBy = "man", cascade = ALL) >+ public PartnerLink getPartnerLink() { >+ return partnerLink; > } >- >- public void setId(int id) { >- this.id = id; >+ >+ public void setId(int id) { >+ this.id = id; > } >- >- public void setPartnerLink(PartnerLink partnerLink) { >- this.partnerLink = partnerLink; >+ >+ public void setPartnerLink(PartnerLink partnerLink) { >+ this.partnerLink = partnerLink; > } > } >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/advanced/PartnerLink.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/advanced/PartnerLink.java (revision 8109) >+++ 
jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/advanced/PartnerLink.java (working copy) >@@ -18,47 +18,50 @@ > import static javax.persistence.FetchType.*; > > @Entity >-@Table(name="MW") >+@Table(name = "MW") > @IdClass(org.eclipse.persistence.testing.models.jpa.advanced.PartnerLinkPK.class) > public class PartnerLink { > private Man man; > private Woman woman; > >- public PartnerLink() {} >- @Id >- @OneToOne(cascade=PERSIST, fetch=LAZY) >- @JoinColumn(name="M") >- public Man getMan() { >- return man; >+ public PartnerLink() { > } >+ >+ @Id >+ @OneToOne(cascade = PERSIST, fetch = LAZY) >+ @JoinColumn(name = "M") >+ public Man getMan() { >+ return man; >+ } >+ > @Transient >- public Integer getManId() { >+ public Integer getManId() { > return (getMan() == null) ? null : getMan().getId(); > } >- >+ > @Id >- @OneToOne(cascade=PERSIST, fetch=LAZY) >- @JoinColumn(name="W") >- public Woman getWoman() { >- return woman; >+ @OneToOne(cascade = PERSIST, fetch = LAZY) >+ @JoinColumn(name = "W") >+ public Woman getWoman() { >+ return woman; > } >- >+ > @Transient >- public Integer getWomanId() { >+ public Integer getWomanId() { > return (getWoman() == null) ? 
null : getWoman().getId(); > } >- >- public void setMan(Man man) { >- this.man = man; >+ >+ public void setMan(Man man) { >+ this.man = man; > } >- >- public void setManId(Integer manId) { >+ >+ public void setManId(Integer manId) { > } >- >- public void setWoman(Woman woman) { >- this.woman = woman; >+ >+ public void setWoman(Woman woman) { >+ this.woman = woman; > } >- >- public void setWomanId(Integer womanId) { >+ >+ public void setWomanId(Integer womanId) { > } > } >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/advanced/Woman.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/advanced/Woman.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/advanced/Woman.java (working copy) >@@ -23,48 +23,49 @@ > private String lastName; > private PartnerLink partnerLink; > >- public Woman() {} >- >+ public Woman() { >+ } >+ > public Woman(String firstName, String lastName) { > this(); > this.firstName = firstName; > this.lastName = lastName; > } >- >+ > @Id >- @GeneratedValue(strategy=IDENTITY) >- public Integer getId() { >- return id; >+ @GeneratedValue(strategy = IDENTITY) >+ public Integer getId() { >+ return id; > } >- >- @Column(name="F_NAME") >- public String getFirstName() { >- return firstName; >+ >+ @Column(name = "F_NAME") >+ public String getFirstName() { >+ return firstName; > } >- >- public void setFirstName(String name) { >- this.firstName = name; >+ >+ public void setFirstName(String name) { >+ this.firstName = name; > } > >- @Column(name="L_NAME") >- public String getLastName() { >- return lastName; >+ @Column(name = "L_NAME") >+ public String getLastName() { >+ return lastName; > } >- >- public void setLastName(String name) { >- this.lastName = name; >+ >+ public void setLastName(String name) { >+ this.lastName = name; > } > >- @OneToOne(mappedBy="woman") >- public PartnerLink 
getPartnerLink() { >- return partnerLink; >+ @OneToOne(mappedBy = "woman") >+ public PartnerLink getPartnerLink() { >+ return partnerLink; > } >- >- public void setId(Integer id) { >- this.id = id; >+ >+ public void setId(Integer id) { >+ this.id = id; > } >- >- public void setPartnerLink(PartnerLink partnerLink) { >- this.partnerLink = partnerLink; >+ >+ public void setPartnerLink(PartnerLink partnerLink) { >+ this.partnerLink = partnerLink; > } > } >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/CKeyEntityB.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/CKeyEntityB.java (revision 8110) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/CKeyEntityB.java (working copy) >@@ -19,6 +19,7 @@ > > import javax.persistence.*; > >+import org.eclipse.persistence.annotations.CascadeOnDelete; > import org.eclipse.persistence.annotations.Index; > > import java.util.Collection; >@@ -48,12 +49,14 @@ > > > @OneToMany(mappedBy="bs") >+ @CascadeOnDelete > private Collection<CKeyEntityA> as; > > @ManyToMany(mappedBy="bs") > private Collection<CKeyEntityC> cs; > > @OneToOne(mappedBy="uniqueB") >+ @CascadeOnDelete > private CKeyEntityA uniqueA; > > @OneToMany(cascade={PERSIST, MERGE}) >@@ -61,6 +64,7 @@ > @JoinColumn(name="FK_SEQ", referencedColumnName="SEQ"), > @JoinColumn(name="FK_CODE", referencedColumnName="CODE") > }) >+ @CascadeOnDelete > private List<Comment<String>> comments; > > @ManyToOne >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/CKeyEntityB2.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/CKeyEntityB2.java (revision 8109) >+++ 
jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/CKeyEntityB2.java (working copy) >@@ -17,6 +17,8 @@ > import javax.persistence.PrimaryKeyJoinColumns; > import javax.persistence.Table; > >+import org.eclipse.persistence.annotations.CascadeOnDelete; >+ > /** > * @author Wonseok Kim > */ >@@ -26,6 +28,7 @@ > @PrimaryKeyJoinColumn(name="SEQ", referencedColumnName="SEQ"), > @PrimaryKeyJoinColumn(name="CODE", referencedColumnName="CODE") > }) >+@CascadeOnDelete > public class CKeyEntityB2 extends CKeyEntityB { > > public CKeyEntityB2() { >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/CKeyEntityC.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/CKeyEntityC.java (revision 8110) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/CKeyEntityC.java (working copy) >@@ -33,6 +33,7 @@ > */ > @Entity > @Table(name = "DDL_CKENTC") >+@Index(name="INDEX_BS", table="DDL_CKENT_C_B", columnNames={"C_SEQ", "C_ROLE"}) > public class CKeyEntityC { > > @EmbeddedId >@@ -62,7 +63,6 @@ > @JoinColumn(name="B_CODE", referencedColumnName = "CODE") > } > ) >- @Index(name="INDEX_BS", table="DDL_CKENT_C_B", columnNames={"C_SEQ", "C_ROLE"}) > private Collection<CKeyEntityB> bs; > > >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/MapHolder.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/MapHolder.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/MapHolder.java (working copy) >@@ -18,6 +18,8 @@ > import java.util.Map; > import javax.persistence.*; > >+import org.eclipse.persistence.annotations.CascadeOnDelete; >+ > @Entity > 
@Table(name="DDL_MAP_HOLDER") > public class MapHolder { >@@ -46,6 +48,7 @@ > } > > @ElementCollection >+ @CascadeOnDelete > public Map<EntityMapKey, String> getDCMap() { > return directCollectionMap; > } >@@ -54,6 +57,7 @@ > } > > @ElementCollection >+ @CascadeOnDelete > public Map<EntityMapKey, AggregateMapValue> getACMap() { > return aggregateCollectionMap; > } >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/PatentCollection.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/PatentCollection.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/PatentCollection.java (working copy) >@@ -15,13 +15,16 @@ > import javax.persistence.Embeddable; > import javax.persistence.ManyToMany; > >+import org.eclipse.persistence.annotations.CascadeOnDelete; >+ > import java.util.ArrayList; > import java.util.List; > > @Embeddable > public class PatentCollection { > >- @ManyToMany >+ @ManyToMany >+ @CascadeOnDelete > private List<Patent> patents = new ArrayList<Patent>(); > > public List<Patent> getPatents() { >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/PropertyRecord.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/PropertyRecord.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/ddlgeneration/PropertyRecord.java (working copy) >@@ -24,6 +24,8 @@ > import javax.persistence.GeneratedValue; > import javax.persistence.Id; > >+import org.eclipse.persistence.annotations.CascadeOnDelete; >+ > @Entity > public class PropertyRecord { > @Id >@@ -41,6 +43,7 @@ > column=@Column(name="ASSESSMENT")) > }) > @ElementCollection >+ @CascadeOnDelete > public Map<Address, 
PropertyInfo> propertyInfos = new HashMap<Address, PropertyInfo>(); > } > >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/AdvancedTableCreator.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/AdvancedTableCreator.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/AdvancedTableCreator.java (working copy) >@@ -27,13 +27,12 @@ > addTableDefinition(buildCREDITCARDSTable()); > addTableDefinition(buildDEPTTable()); > addTableDefinition(buildDEPT_EMPTable()); >- addTableDefinition(buildEMPLOYEETable()); > addTableDefinition(buildEQUIPMENTTable()); > addTableDefinition(buildEQUIPMENTCODETable()); > addTableDefinition(buildGOLFERTable()); > addTableDefinition(buildLARGEPROJECTTable()); >- addTableDefinition(buildMANTable()); > addTableDefinition(buildPARTNERLINKTable()); >+ addTableDefinition(buildMANTable()); > addTableDefinition(buildPHONENUMBERTable()); > addTableDefinition(buildPHONENUMBERSTATUSTable()); > addTableDefinition(buildPLATINUMBUYERTable()); >@@ -43,12 +42,11 @@ > addTableDefinition(buildSALARYTable()); > addTableDefinition(buildVEGETABLETable()); > addTableDefinition(buildWOMANTable()); >- addTableDefinition(buildWORKWEEKTable()); >+ addTableDefinition(buildWORKWEEKTable()); > addTableDefinition(buildWORLDRANKTable()); > addTableDefinition(buildPARENTTable()); > addTableDefinition(buildCHILDTable()); >- >- //addTableDefinition(buildEMPLOYEE_SEQTable()); >+ addTableDefinition(buildEMPLOYEETable()); > } > > public static TableDefinition buildADDRESSTable() { >@@ -355,36 +353,6 @@ > > return table; > } >- >- public static TableDefinition buildEMPLOYEE_SEQTable() { >- TableDefinition table = new TableDefinition(); >-// table.setName("SEQUENCE"); >- table.setName("CMP3_FA_EMPLOYEE_SEQ"); >- >- FieldDefinition fieldSEQ_COUNT 
= new FieldDefinition(); >- fieldSEQ_COUNT.setName("SEQ_COUNT"); >- fieldSEQ_COUNT.setTypeName("NUMBER"); >- fieldSEQ_COUNT.setSize(15); >- fieldSEQ_COUNT.setSubSize(0); >- fieldSEQ_COUNT.setIsPrimaryKey(false); >- fieldSEQ_COUNT.setIsIdentity(false); >- fieldSEQ_COUNT.setUnique(false); >- fieldSEQ_COUNT.setShouldAllowNull(false); >- table.addField(fieldSEQ_COUNT); >- >- FieldDefinition fieldSEQ_NAME = new FieldDefinition(); >- fieldSEQ_NAME.setName("SEQ_NAME"); >- fieldSEQ_NAME.setTypeName("VARCHAR2"); >- fieldSEQ_NAME.setSize(80); >- fieldSEQ_NAME.setSubSize(0); >- fieldSEQ_NAME.setIsPrimaryKey(true); >- fieldSEQ_NAME.setIsIdentity(false); >- fieldSEQ_NAME.setUnique(false); >- fieldSEQ_NAME.setShouldAllowNull(false); >- table.addField(fieldSEQ_NAME); >- >- return table; >- } > > public static TableDefinition buildEMPLOYEETable() { > TableDefinition table = new TableDefinition(); >@@ -396,7 +364,6 @@ > field.setSize(15); > field.setShouldAllowNull(false); > field.setIsPrimaryKey(true); >- field.setUnique(false); > field.setIsIdentity(true); > table.addField(field); > >@@ -404,60 +371,36 @@ > field1.setName("F_NAME"); > field1.setTypeName("VARCHAR"); > field1.setSize(40); >- field1.setShouldAllowNull(true); >- field1.setIsPrimaryKey(false); >- field1.setUnique(false); >- field1.setIsIdentity(false); > table.addField(field1); > > FieldDefinition field2 = new FieldDefinition(); > field2.setName("L_NAME"); > field2.setTypeName("VARCHAR"); > field2.setSize(40); >- field2.setShouldAllowNull(true); >- field2.setIsPrimaryKey(false); >- field2.setUnique(false); >- field2.setIsIdentity(false); > table.addField(field2); > > FieldDefinition fieldGender = new FieldDefinition(); > fieldGender.setName("GENDER"); > fieldGender.setTypeName("VARCHAR"); > fieldGender.setSize(1); >- fieldGender.setShouldAllowNull(true); >- fieldGender.setIsPrimaryKey(false); >- fieldGender.setUnique(false); >- fieldGender.setIsIdentity(false); > table.addField(fieldGender); > > FieldDefinition field3 
= new FieldDefinition(); > field3.setName("START_DATE"); > field3.setTypeName("DATE"); > field3.setSize(23); >- field3.setShouldAllowNull(true); >- field3.setIsPrimaryKey(false); >- field3.setUnique(false); >- field3.setIsIdentity(false); > table.addField(field3); > > FieldDefinition field4 = new FieldDefinition(); > field4.setName("END_DATE"); > field4.setTypeName("DATE"); > field4.setSize(23); >- field4.setShouldAllowNull(true); >- field4.setIsPrimaryKey(false); >- field4.setUnique(false); >- field4.setIsIdentity(false); > table.addField(field4); > > FieldDefinition field8 = new FieldDefinition(); > field8.setName("ADDR_ID"); > field8.setTypeName("NUMERIC"); > field8.setSize(15); >- field8.setShouldAllowNull(true); >- field8.setIsPrimaryKey(false); >- field8.setUnique(false); >- field8.setIsIdentity(false); > field8.setForeignKeyFieldName("CMP3_FA_ADDRESS.ADDRESS_ID"); > table.addField(field8); > >@@ -465,10 +408,6 @@ > field9.setName("MANAGER_EMP_ID"); > field9.setTypeName("NUMERIC"); > field9.setSize(15); >- field9.setShouldAllowNull(true); >- field9.setIsPrimaryKey(false); >- field9.setUnique(false); >- field9.setIsIdentity(false); > field9.setForeignKeyFieldName("CMP3_FA_EMPLOYEE.EMP_ID"); > table.addField(field9); > >@@ -476,20 +415,12 @@ > field10.setName("VERSION"); > field10.setTypeName("NUMERIC"); > field10.setSize(15); >- field10.setShouldAllowNull(true); >- field10.setIsPrimaryKey(false); >- field10.setUnique(false); >- field10.setIsIdentity(false); > table.addField(field10); > > FieldDefinition fieldDEPT = new FieldDefinition(); > fieldDEPT.setName("DEPT_ID"); > fieldDEPT.setTypeName("NUMERIC"); > fieldDEPT.setSize(15); >- fieldDEPT.setShouldAllowNull(true); >- fieldDEPT.setIsPrimaryKey(false); >- fieldDEPT.setUnique(false); >- fieldDEPT.setIsIdentity(false); > fieldDEPT.setForeignKeyFieldName("CMP3_FA_DEPT.ID"); > table.addField(fieldDEPT); > >@@ -497,20 +428,12 @@ > fieldStatus.setName("STATUS"); > fieldStatus.setTypeName("NUMERIC"); > 
fieldStatus.setSize(15); >- fieldStatus.setIsPrimaryKey(false); >- fieldStatus.setUnique(false); >- fieldStatus.setIsIdentity(false); >- fieldStatus.setShouldAllowNull(true); > table.addField(fieldStatus); > > FieldDefinition fieldPayScale = new FieldDefinition(); > fieldPayScale.setName("PAY_SCALE"); > fieldPayScale.setTypeName("VARCHAR"); > fieldPayScale.setSize(40); >- fieldPayScale.setIsPrimaryKey(false); >- fieldPayScale.setUnique(false); >- fieldPayScale.setIsIdentity(false); >- fieldPayScale.setShouldAllowNull(true); > table.addField(fieldPayScale); > > FieldDefinition fieldRoomNumber = new FieldDefinition(); >@@ -518,10 +441,6 @@ > fieldRoomNumber.setTypeName("NUMBER"); > fieldRoomNumber.setSize(15); > fieldRoomNumber.setSubSize(0); >- fieldRoomNumber.setIsPrimaryKey(false); >- fieldRoomNumber.setIsIdentity(false); >- fieldRoomNumber.setUnique(false); >- fieldRoomNumber.setShouldAllowNull(true); > table.addField(fieldRoomNumber); > > // SECTION: FIELD >@@ -529,10 +448,6 @@ > fieldFormerCompany.setName("FORMER_COMPANY"); > fieldFormerCompany.setTypeName("VARCHAR"); > fieldFormerCompany.setSize(40); >- fieldFormerCompany.setShouldAllowNull(true ); >- fieldFormerCompany.setIsPrimaryKey(false ); >- fieldFormerCompany.setUnique(false ); >- fieldFormerCompany.setIsIdentity(false ); > table.addField(fieldFormerCompany); > > // SECTION: FIELD >@@ -540,10 +455,6 @@ > fieldFormerStartDate.setName("FORMER_START_DATE"); > fieldFormerStartDate.setTypeName("DATE"); > fieldFormerStartDate.setSize(23); >- fieldFormerStartDate.setShouldAllowNull(true ); >- fieldFormerStartDate.setIsPrimaryKey(false ); >- fieldFormerStartDate.setUnique(false ); >- fieldFormerStartDate.setIsIdentity(false ); > table.addField(fieldFormerStartDate); > > // SECTION: FIELD >@@ -551,40 +462,24 @@ > fieldFormerEndDate.setName("FORMER_END_DATE"); > fieldFormerEndDate.setTypeName("DATE"); > fieldFormerEndDate.setSize(23); >- fieldFormerEndDate.setShouldAllowNull(true ); >- 
fieldFormerEndDate.setIsPrimaryKey(false ); >- fieldFormerEndDate.setUnique(false ); >- fieldFormerEndDate.setIsIdentity(false ); > table.addField(fieldFormerEndDate); > > FieldDefinition fieldISMANAGER = new FieldDefinition(); > fieldISMANAGER.setName("ISMANAGER"); > fieldISMANAGER.setTypeName("VARCHAR"); > fieldISMANAGER.setSize(5); >- fieldISMANAGER.setShouldAllowNull(true); >- fieldISMANAGER.setIsPrimaryKey(false); >- fieldISMANAGER.setUnique(false); >- fieldISMANAGER.setIsIdentity(false); > table.addField(fieldISMANAGER); > > FieldDefinition fieldGETMANAGER = new FieldDefinition(); > fieldGETMANAGER.setName("GETMANAGER"); > fieldGETMANAGER.setTypeName("VARCHAR"); > fieldGETMANAGER.setSize(5); >- fieldGETMANAGER.setShouldAllowNull(true); >- fieldGETMANAGER.setIsPrimaryKey(false); >- fieldGETMANAGER.setUnique(false); >- fieldGETMANAGER.setIsIdentity(false); > table.addField(fieldGETMANAGER); > > FieldDefinition fieldSETMANAGER = new FieldDefinition(); > fieldSETMANAGER.setName("SETMANAGER"); > fieldSETMANAGER.setTypeName("VARCHAR"); > fieldSETMANAGER.setSize(40); >- fieldSETMANAGER.setShouldAllowNull(true); >- fieldSETMANAGER.setIsPrimaryKey(false); >- fieldSETMANAGER.setUnique(false); >- fieldSETMANAGER.setIsIdentity(false); > table.addField(fieldSETMANAGER); > > /* ForeignKeyConstraint foreignKeyEMPLOYEE_ADDRESS = new ForeignKeyConstraint(); >@@ -719,9 +614,6 @@ > field.setSize(15); > field.setShouldAllowNull(false ); > field.setIsPrimaryKey(true ); >- field.setUnique(false ); >- field.setIsIdentity(false ); >- field.setForeignKeyFieldName("CMP3_FA_PROJECT.PROJ_ID"); > table.addField(field); > > // SECTION: FIELD >@@ -729,11 +621,15 @@ > field1.setName("BUDGET"); > field1.setTypeName("DOUBLE PRECIS"); > field1.setSize(18); >- field1.setShouldAllowNull(true ); >- field1.setIsPrimaryKey(false ); >- field1.setUnique(false ); >- field1.setIsIdentity(false ); > table.addField(field1); >+ >+ ForeignKeyConstraint foreignKey = new ForeignKeyConstraint(); >+ 
foreignKey.setName("FA_LPROJ_PROJ_FK"); >+ foreignKey.setTargetTable("CMP3_FA_PROJECT"); >+ foreignKey.addSourceField("PROJ_ID"); >+ foreignKey.addTargetField("PROJ_ID"); >+ foreignKey.setShouldCascadeOnDelete(true); >+ table.addForeignKeyConstraint(foreignKey); > > return table; > } >@@ -819,7 +715,6 @@ > fieldMID.setIsIdentity(false); > fieldMID.setUnique(false); > fieldMID.setShouldAllowNull(true); >- fieldMID.setForeignKeyFieldName("CMP3_FA_MAN.ID"); > table.addField(fieldMID); > > FieldDefinition fieldWID = new FieldDefinition(); >@@ -831,9 +726,24 @@ > fieldWID.setIsIdentity(false); > fieldWID.setUnique(false); > fieldWID.setShouldAllowNull(true); >- fieldWID.setForeignKeyFieldName("CMP3_FA_WOMAN.ID"); > table.addField(fieldWID); > >+ ForeignKeyConstraint foreignKey = new ForeignKeyConstraint(); >+ foreignKey.setName("FA_LINK_WN_FK"); >+ foreignKey.setTargetTable("CMP3_FA_WOMAN"); >+ foreignKey.addSourceField("W"); >+ foreignKey.addTargetField("ID"); >+ foreignKey.setShouldCascadeOnDelete(true); >+ table.addForeignKeyConstraint(foreignKey); >+ >+ foreignKey = new ForeignKeyConstraint(); >+ foreignKey.setName("FA_LINK_MN_FK"); >+ foreignKey.setTargetTable("CMP3_FA_MAN"); >+ foreignKey.addSourceField("M"); >+ foreignKey.addTargetField("ID"); >+ foreignKey.setShouldCascadeOnDelete(true); >+ table.addForeignKeyConstraint(foreignKey); >+ > return table; > } > >@@ -887,7 +797,6 @@ > field.setIsPrimaryKey(true ); > field.setUnique(false ); > field.setIsIdentity(false ); >- field.setForeignKeyFieldName("CMP3_FA_EMPLOYEE.EMP_ID"); > table.addField(field); > > // SECTION: FIELD >@@ -923,13 +832,14 @@ > field3.setIsIdentity(false ); > table.addField(field3); > >- /* ForeignKeyConstraint foreignKeyPHONE_OWNER = new ForeignKeyConstraint(); >- foreignKeyPHONE_OWNER.setName("PHONE_OWNER"); >+ ForeignKeyConstraint foreignKeyPHONE_OWNER = new ForeignKeyConstraint(); >+ foreignKeyPHONE_OWNER.setName("FA_PHONE_OW_FK"); > foreignKeyPHONE_OWNER.setTargetTable("CMP3_FA_EMPLOYEE"); 
>- foreignKeyPHONE_OWNER.addSourceField("EMP_ID"); >+ foreignKeyPHONE_OWNER.addSourceField("OWNER_ID"); > foreignKeyPHONE_OWNER.addTargetField("EMP_ID"); >+ foreignKeyPHONE_OWNER.setShouldCascadeOnDelete(true); > table.addForeignKeyConstraint(foreignKeyPHONE_OWNER); >-*/ >+ > return table; > } > >@@ -977,7 +887,6 @@ > field.setIsPrimaryKey(true ); > field.setUnique(false ); > field.setIsIdentity(false ); >- field.setForeignKeyFieldName("CMP3_FA_EMPLOYEE.EMP_ID"); > table.addField(field); > > // SECTION: FIELD >@@ -989,23 +898,23 @@ > field1.setIsPrimaryKey(true ); > field1.setUnique(false ); > field1.setIsIdentity(false ); >- field1.setForeignKeyFieldName("CMP3_FA_PROJECT.PROJ_ID"); > table.addField(field1); > >-/* ForeignKeyConstraint foreignKeyPROJECT_EMPLOYEE = new ForeignKeyConstraint(); >- foreignKeyPROJECT_EMPLOYEE.setName("PROJECT_EMPLOYEE"); >+ ForeignKeyConstraint foreignKeyPROJECT_EMPLOYEE = new ForeignKeyConstraint(); >+ foreignKeyPROJECT_EMPLOYEE.setName("FA_PROJ_EMP_FK"); > foreignKeyPROJECT_EMPLOYEE.setTargetTable("CMP3_FA_EMPLOYEE"); >- foreignKeyPROJECT_EMPLOYEE.addSourceField("EMP_ID"); >+ foreignKeyPROJECT_EMPLOYEE.addSourceField("EMPLOYEES_EMP_ID"); > foreignKeyPROJECT_EMPLOYEE.addTargetField("EMP_ID"); >+ foreignKeyPROJECT_EMPLOYEE.setShouldCascadeOnDelete(true); > table.addForeignKeyConstraint(foreignKeyPROJECT_EMPLOYEE); > > ForeignKeyConstraint foreignKeyEMPLOYEE_PROJECT = new ForeignKeyConstraint(); >- foreignKeyEMPLOYEE_PROJECT.setName("EMPLOYEE_PROJECT"); >+ foreignKeyEMPLOYEE_PROJECT.setName("FA_EMP_PROJP_FK"); > foreignKeyEMPLOYEE_PROJECT.setTargetTable("CMP3_FA_PROJECT"); >- foreignKeyEMPLOYEE_PROJECT.addSourceField("EMP_ID"); >- foreignKeyEMPLOYEE_PROJECT.addTargetField("EMP_ID"); >+ foreignKeyEMPLOYEE_PROJECT.addSourceField("projects_PROJ_ID"); >+ foreignKeyEMPLOYEE_PROJECT.addTargetField("PROJ_ID"); > table.addForeignKeyConstraint(foreignKeyEMPLOYEE_PROJECT); >-*/ >+ > return table; > } > >@@ -1102,10 +1011,6 @@ > 
field.setTypeName("NUMERIC"); > field.setSize(15); > field.setShouldAllowNull(false); >- field.setIsPrimaryKey(false); >- field.setUnique(false); >- field.setIsIdentity(false); >- field.setForeignKeyFieldName("CMP3_FA_EMPLOYEE.EMP_ID"); > table.addField(field); > > // SECTION: FIELD >@@ -1114,10 +1019,15 @@ > field1.setTypeName("VARCHAR"); > field1.setSize(200); > field1.setShouldAllowNull(false); >- field1.setIsPrimaryKey(false); >- field1.setUnique(false); >- field1.setIsIdentity(false); > table.addField(field1); >+ >+ ForeignKeyConstraint foreignKey = new ForeignKeyConstraint(); >+ foreignKey.setName("FA_EMP_RESP_FK"); >+ foreignKey.setTargetTable("CMP3_FA_EMPLOYEE"); >+ foreignKey.addSourceField("EMP_ID"); >+ foreignKey.addTargetField("EMP_ID"); >+ //foreignKey.setShouldCascadeOnDelete(true); >+ table.addForeignKeyConstraint(foreignKey); > > return table; > } >@@ -1132,10 +1042,6 @@ > fieldEMP_ID.setSize(15); > fieldEMP_ID.setSubSize(0); > fieldEMP_ID.setIsPrimaryKey(true); >- fieldEMP_ID.setIsIdentity(false); >- fieldEMP_ID.setUnique(false); >- fieldEMP_ID.setShouldAllowNull(false); >- fieldEMP_ID.setForeignKeyFieldName("CMP3_FA_EMPLOYEE.EMP_ID"); > table.addField(fieldEMP_ID); > > FieldDefinition fieldSALARY = new FieldDefinition(); >@@ -1143,11 +1049,15 @@ > fieldSALARY.setTypeName("NUMBER"); > fieldSALARY.setSize(15); > fieldSALARY.setSubSize(0); >- fieldSALARY.setIsPrimaryKey(false); >- fieldSALARY.setIsIdentity(false); >- fieldSALARY.setUnique(false); >- fieldSALARY.setShouldAllowNull(true); > table.addField(fieldSALARY); >+ >+ ForeignKeyConstraint foreignKey = new ForeignKeyConstraint(); >+ foreignKey.setName("FA_EMP_SAL_FK"); >+ foreignKey.setTargetTable("CMP3_FA_EMPLOYEE"); >+ foreignKey.addSourceField("EMP_ID"); >+ foreignKey.addTargetField("EMP_ID"); >+ foreignKey.setShouldCascadeOnDelete(true); >+ table.addForeignKeyConstraint(foreignKey); > > return table; > } >@@ -1225,7 +1135,6 @@ > fieldID.setIsPrimaryKey(false); > fieldID.setUnique(false); > 
fieldID.setIsIdentity(false); >- fieldID.setForeignKeyFieldName("CMP3_FA_EMPLOYEE.EMP_ID"); > table.addField(fieldID); > > FieldDefinition fieldWORKWEEK = new FieldDefinition(); >@@ -1237,6 +1146,14 @@ > fieldWORKWEEK.setIsIdentity(false); > fieldWORKWEEK.setShouldAllowNull(false); > table.addField(fieldWORKWEEK); >+ >+ ForeignKeyConstraint foreignKey = new ForeignKeyConstraint(); >+ foreignKey.setName("FA_EMP_WW_FK"); >+ foreignKey.setTargetTable("CMP3_FA_EMPLOYEE"); >+ foreignKey.addSourceField("EMP_ID"); >+ foreignKey.addTargetField("EMP_ID"); >+ foreignKey.setShouldCascadeOnDelete(true); >+ table.addForeignKeyConstraint(foreignKey); > > return table; > } >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/Employee.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/Employee.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/Employee.java (working copy) >@@ -20,6 +20,7 @@ > > import org.eclipse.persistence.annotations.BasicCollection; > import org.eclipse.persistence.annotations.Cache; >+import org.eclipse.persistence.annotations.CascadeOnDelete; > import org.eclipse.persistence.annotations.CollectionTable; > import org.eclipse.persistence.annotations.Convert; > import org.eclipse.persistence.annotations.ConversionValue; >@@ -63,6 +64,7 @@ > @EntityListeners(org.eclipse.persistence.testing.models.jpa.fieldaccess.advanced.EmployeeListener.class) > @Table(name="CMP3_FA_EMPLOYEE") > @SecondaryTable(name="CMP3_FA_SALARY") >+@CascadeOnDelete > @PrimaryKeyJoinColumn(name="EMP_ID", referencedColumnName="EMP_ID") > @NamedNativeQuery( > name="findAllFieldAccessSQLEmployees", >@@ -206,6 +208,7 @@ > > @BasicCollection > @CollectionTable(name="CMP3_FA_EMP_WORKWEEK") >+ @CascadeOnDelete > private Set<Weekdays> workWeek; > > 
@ManyToMany(cascade={PERSIST, MERGE}) >@@ -216,6 +219,7 @@ > joinColumns=@JoinColumn(name="EMPLOYEES_EMP_ID", referencedColumnName="EMP_ID") > //inverseJoinColumns=@JoinColumn(name="PROJECTS_PROJ_ID", referencedColumnName="PROJ_ID") > ) >+ @CascadeOnDelete > private Collection<Project> projects; > > @BasicCollection(valueColumn=@Column(name="DESCRIPTION")) >@@ -224,16 +228,17 @@ > > @OneToMany(cascade=ALL, mappedBy="owner") > @PrivateOwned >+ @CascadeOnDelete > private Collection<PhoneNumber> phoneNumbers; > > @OneToMany(cascade=ALL, mappedBy="manager") > private Collection<Employee> managedEmployees; > > public Employee () { >- this.phoneNumbers = new Vector<PhoneNumber>(); >- this.projects = new Vector<Project>(); >- this.managedEmployees = new Vector<Employee>(); >- this.responsibilities = new Vector<String>(); >+ this.phoneNumbers = new ArrayList<PhoneNumber>(); >+ this.projects = new ArrayList<Project>(); >+ this.managedEmployees = new ArrayList<Employee>(); >+ this.responsibilities = new ArrayList<String>(); > } > > public Employee(String firstName, String lastName){ >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/LargeProject.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/LargeProject.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/LargeProject.java (working copy) >@@ -14,6 +14,8 @@ > > import javax.persistence.*; > >+import org.eclipse.persistence.annotations.CascadeOnDelete; >+ > /** > * Local interface for the large project bean. > * This is the bean's public/local interface for the clients usage. 
>@@ -22,6 +24,7 @@ > */ > @Entity(name="LargeProject") > @Table(name="CMP3_FA_LPROJECT") >+@CascadeOnDelete > @DiscriminatorValue("L") > @NamedQueries({ > @NamedQuery( >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/Man.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/Man.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/Man.java (working copy) >@@ -14,43 +14,48 @@ > > import javax.persistence.*; > >+import org.eclipse.persistence.annotations.CascadeOnDelete; >+ >+import static javax.persistence.CascadeType.ALL; > import static javax.persistence.GenerationType.*; > >-@Entity(name="Man") >-@Table(name="CMP3_FA_MAN") >+@Entity(name = "Man") >+@Table(name = "CMP3_FA_MAN") > public class Man { >- @Id >- @GeneratedValue(strategy=SEQUENCE, generator="FA_MAN_SEQ_GENERATOR") >- @SequenceGenerator(name="FA_MAN_SEQ_GENERATOR", sequenceName="MAN_SEQ") >+ @Id >+ @GeneratedValue(strategy = SEQUENCE, generator = "FA_MAN_SEQ_GENERATOR") >+ @SequenceGenerator(name = "FA_MAN_SEQ_GENERATOR", sequenceName = "MAN_SEQ") > private Integer id; >- @OneToOne(mappedBy="man") >+ @OneToOne(mappedBy = "man", cascade = ALL) >+ @CascadeOnDelete > private PartnerLink partnerLink; >- >- private String name; > >- public Man() {} >- >- public Integer getId() { >- return id; >- } >- >- public String getName(){ >- return name; >- } >- >- public PartnerLink getPartnerLink() { >- return partnerLink; >+ private String name; >+ >+ public Man() { > } >- >- public void setId(Integer id) { >- this.id = id; >+ >+ public Integer getId() { >+ return id; > } >- >- public void setName(String name){ >- this.name = name; >- } >- >- public void setPartnerLink(PartnerLink partnerLink) { >- this.partnerLink = partnerLink; >+ >+ public String getName() { >+ return name; > } >+ 
>+ public PartnerLink getPartnerLink() { >+ return partnerLink; >+ } >+ >+ public void setId(Integer id) { >+ this.id = id; >+ } >+ >+ public void setName(String name) { >+ this.name = name; >+ } >+ >+ public void setPartnerLink(PartnerLink partnerLink) { >+ this.partnerLink = partnerLink; >+ } > } >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/PartnerLink.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/PartnerLink.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/PartnerLink.java (working copy) >@@ -15,60 +15,59 @@ > import javax.persistence.*; > > import static javax.persistence.CascadeType.*; >-import static javax.persistence.FetchType.*; > >-@Entity(name="PartnerLink") >-@Table(name="CMP3_FA_MW") >+@Entity(name = "PartnerLink") >+@Table(name = "CMP3_FA_MW") > @IdClass(org.eclipse.persistence.testing.models.jpa.fieldaccess.advanced.PartnerLinkPK.class) > public class PartnerLink { >- @Id >- @Column(name="M", insertable=false, updatable=false) >- private Integer manId; >+ @Id >+ @Column(name = "M", insertable = false, updatable = false) >+ private Integer manId; > >- @Id >- @Column(name="W", insertable=false, updatable=false) >- private Integer womanId; >+ @Id >+ @Column(name = "W", insertable = false, updatable = false) >+ private Integer womanId; > >- @OneToOne(cascade=PERSIST, fetch=FetchType.LAZY) >- @JoinColumn(name="M") >+ @OneToOne(cascade = PERSIST, fetch = FetchType.LAZY) >+ @JoinColumn(name = "M") > private Man man; >- > >- @OneToOne(cascade=PERSIST, fetch=FetchType.LAZY) >- @JoinColumn(name="W") >+ @OneToOne(cascade = PERSIST, fetch = FetchType.LAZY) >+ @JoinColumn(name = "W") > private Woman woman; > >- public PartnerLink() {} >- >- public Man getMan() { >- return man; >- } >- >- public Integer getManId() { 
>+ public PartnerLink() { >+ } >+ >+ public Man getMan() { >+ return man; >+ } >+ >+ public Integer getManId() { > manId = (getMan() == null) ? null : getMan().getId(); >- return manId; >+ return manId; > } >- >- public Woman getWoman() { >- return woman; >- } >- >- public Integer getWomanId() { >+ >+ public Woman getWoman() { >+ return woman; >+ } >+ >+ public Integer getWomanId() { > womanId = (getWoman() == null) ? null : getWoman().getId(); >- return womanId; >+ return womanId; > } >- >- public void setMan(Man man) { >- this.man = man; >+ >+ public void setMan(Man man) { >+ this.man = man; > } >- >- public void setManId(Integer manId) { >+ >+ public void setManId(Integer manId) { > } >- >- public void setWoman(Woman woman) { >- this.woman = woman; >+ >+ public void setWoman(Woman woman) { >+ this.woman = woman; > } >- >- public void setWomanId(Integer womanId) { >+ >+ public void setWomanId(Integer womanId) { > } > } >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/PhoneNumber.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/PhoneNumber.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/PhoneNumber.java (working copy) >@@ -13,6 +13,7 @@ > package org.eclipse.persistence.testing.models.jpa.fieldaccess.advanced; > > import java.io.*; >+import java.util.ArrayList; > import java.util.Collection; > import java.util.List; > import java.util.Vector; >@@ -26,32 +27,36 @@ > import org.eclipse.persistence.queries.FetchGroupTracker; > > /** >- * <p><b>Purpose</b>: Describes an Employee's phone number. >- * <p><b>Description</b>: Used in a 1:M relationship from an employee. >- * Test @PrimaryKey support with composite primary key. >+ * <p> >+ * <b>Purpose</b>: Describes an Employee's phone number. 
>+ * <p> >+ * <b>Description</b>: Used in a 1:M relationship from an employee. Test @PrimaryKey >+ * support with composite primary key. > */ > @SuppressWarnings("deprecation") >-@Entity(name="PhoneNumber") >-@Table(name="CMP3_FA_PHONENUMBER") >-@PrimaryKey(columns={@Column(name="OWNER_ID"),@Column(name="TYPE")}) >+@Entity(name = "PhoneNumber") >+@Table(name = "CMP3_FA_PHONENUMBER") >+@PrimaryKey(columns = { @Column(name = "OWNER_ID"), @Column(name = "TYPE") }) > public class PhoneNumber implements Serializable { >- public enum PhoneStatus { ACTIVE, ASSIGNED, UNASSIGNED, DEAD } >- >- @Column(name="NUMB") >- private String number; >- @Column(name="TYPE") >- private String type; >- @ManyToOne >- @JoinColumn(name="OWNER_ID", referencedColumnName="EMP_ID") >- private Employee owner; >- @Column(name="AREA_CODE") >+ public enum PhoneStatus { >+ ACTIVE, ASSIGNED, UNASSIGNED, DEAD >+ } >+ >+ @Column(name = "NUMB") >+ private String number; >+ @Column(name = "TYPE") >+ private String type; >+ @ManyToOne >+ @JoinColumn(name = "OWNER_ID", referencedColumnName = "EMP_ID") >+ private Employee owner; >+ @Column(name = "AREA_CODE") > private String areaCode; >- >- @BasicCollection >- @CollectionTable(name="CMP3_FA_PHONE_STATUS") >+ >+ @BasicCollection >+ @CollectionTable(name = "CMP3_FA_PHONE_STATUS") > @Enumerated(STRING) > private Collection<PhoneStatus> status; >- >+ > public PhoneNumber() { > this("", "###", "#######"); > } >@@ -61,70 +66,70 @@ > this.areaCode = theAreaCode; > this.number = theNumber; > this.owner = null; >- this.status = new Vector<PhoneStatus>(); >+ this.status = new ArrayList<PhoneStatus>(); > } > > public void addStatus(PhoneStatus status) { > getStatus().add(status); > } >- >- public String getNumber() { >- return number; >+ >+ public String getNumber() { >+ return number; > } >- >- public void setNumber(String number) { >- this.number = number; >+ >+ public void setNumber(String number) { >+ this.number = number; > } > > // Basic collection on an entity 
that uses a composite primary key. > // We don't specify any of the primary key join columns on the collection >- // table because they should all default accordingly. >+ // table because they should all default accordingly. > public Collection<PhoneStatus> getStatus() { > return status; > } >- >+ > public void setStatus(Collection<PhoneStatus> status) { > this.status = status; > } >- >- public String getType() { >- return type; >+ >+ public String getType() { >+ return type; > } >- >- public void setType(String type) { >- this.type = type; >- } >- >- public String getAreaCode() { >- return areaCode; >+ >+ public void setType(String type) { >+ this.type = type; > } >- >- public void setAreaCode(String areaCode) { >- this.areaCode = areaCode; >- } >- >- public Employee getOwner() { >- return owner; >+ >+ public String getAreaCode() { >+ return areaCode; > } >- >- public void setOwner(Employee owner) { >- this.owner = owner; >- } > >+ public void setAreaCode(String areaCode) { >+ this.areaCode = areaCode; >+ } >+ >+ public Employee getOwner() { >+ return owner; >+ } >+ >+ public void setOwner(Employee owner) { >+ this.owner = owner; >+ } >+ > public void removeStatus(PhoneStatus status) { > getStatus().remove(status); > } >- >+ > /** > * Uses a Vector as its primary key. 
> */ >- public List buildPK(){ >+ public List buildPK() { > List pk = new Vector(); > pk.add(getOwner().getId()); > pk.add(getType()); > return pk; > } >- >+ > /** > * Example: Phone[Work]: (613) 225-8812 > */ >@@ -134,10 +139,10 @@ > writer.write("PhoneNumber["); > writer.write(getType()); > writer.write("]: ("); >- if(!(this instanceof FetchGroupTracker)) { >+ if (!(this instanceof FetchGroupTracker)) { > writer.write(getAreaCode()); > writer.write(") "); >- >+ > int numberLength = getNumber().length(); > writer.write(getNumber().substring(0, Math.min(3, numberLength))); > if (numberLength > 3) { >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/Woman.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/Woman.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/fieldaccess/advanced/Woman.java (working copy) >@@ -14,45 +14,48 @@ > > import javax.persistence.*; > >+import org.eclipse.persistence.annotations.CascadeOnDelete; >+ >+import static javax.persistence.CascadeType.ALL; > import static javax.persistence.GenerationType.*; > >-@Entity(name="Woman") >-@Table(name="CMP3_FA_WOMAN") >+@Entity(name = "Woman") >+@Table(name = "CMP3_FA_WOMAN") > public class Woman { >- @Id >- @GeneratedValue(strategy=SEQUENCE, generator="FA_WOMAN_SEQ_GENERATOR") >- @SequenceGenerator(name="FA_WOMAN_SEQ_GENERATOR", sequenceName="WOMAN_SEQ") >+ @Id >+ @GeneratedValue(strategy = SEQUENCE, generator = "FA_WOMAN_SEQ_GENERATOR") >+ @SequenceGenerator(name = "FA_WOMAN_SEQ_GENERATOR", sequenceName = "WOMAN_SEQ") > private Integer id; >- @OneToOne(mappedBy="woman") >+ @OneToOne(mappedBy = "woman", cascade = ALL) >+ @CascadeOnDelete > private PartnerLink partnerLink; > >- private String name; >- >- public Woman() {} >- >- >- public Integer getId() { >- return id; >+ private String 
name; >+ >+ public Woman() { > } >- >- >- public String getName(){ >- return name; >- } >- >- public PartnerLink getPartnerLink() { >- return partnerLink; >+ >+ public Integer getId() { >+ return id; > } >- >- public void setId(Integer id) { >- this.id = id; >+ >+ public String getName() { >+ return name; > } >- >- public void setName(String name){ >- this.name = name; >- } >- >- public void setPartnerLink(PartnerLink partnerLink) { >- this.partnerLink = partnerLink; >+ >+ public PartnerLink getPartnerLink() { >+ return partnerLink; > } >+ >+ public void setId(Integer id) { >+ this.id = id; >+ } >+ >+ public void setName(String name) { >+ this.name = name; >+ } >+ >+ public void setPartnerLink(PartnerLink partnerLink) { >+ this.partnerLink = partnerLink; >+ } > } >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/advanced/EntityManagerJUnitTestSuite.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/advanced/EntityManagerJUnitTestSuite.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/advanced/EntityManagerJUnitTestSuite.java (working copy) >@@ -126,13 +126,12 @@ > import org.eclipse.persistence.platform.server.was.WebSphere_7_Platform; > > import org.eclipse.persistence.testing.framework.DriverWrapper; >+import org.eclipse.persistence.testing.framework.QuerySQLTracker; > import org.eclipse.persistence.testing.framework.junit.JUnitTestCase; > import org.eclipse.persistence.testing.framework.junit.JUnitTestCaseHelper; > import org.eclipse.persistence.testing.framework.TestProblemException; > import org.eclipse.persistence.testing.models.jpa.advanced.*; > import org.eclipse.persistence.testing.models.jpa.relationships.CustomerCollection; >-import org.eclipse.persistence.testing.models.jpa.relationships.Item; >-import org.eclipse.persistence.testing.models.jpa.relationships.Order; > > /** > * Test 
the EntityManager API using the advanced model. >@@ -334,6 +333,8 @@ > suite.addTest(new EntityManagerJUnitTestSuite("testFlushClearFind")); > suite.addTest(new EntityManagerJUnitTestSuite("testFlushClearQueryPk")); > suite.addTest(new EntityManagerJUnitTestSuite("testFlushClearQueryNonPK")); >+ suite.addTest(new EntityManagerJUnitTestSuite("testDeleteEmployee")); >+ suite.addTest(new EntityManagerJUnitTestSuite("testDeleteMan")); > if (!isJPA10()) { > suite.addTest(new EntityManagerJUnitTestSuite("testDetachNull")); > suite.addTest(new EntityManagerJUnitTestSuite("testDetachRemovedObject")); >@@ -2686,6 +2687,91 @@ > assertFalse("removed object found after flush", foundAfterFlush); > } > >+ // Test that deleting an employee works correctly. >+ public void testDeleteEmployee() { >+ Employee employee = new Employee(); >+ employee.addPhoneNumber(new PhoneNumber("home", "123", "4567")); >+ employee.addPhoneNumber(new PhoneNumber("fax", "456", "4567")); >+ employee.addResponsibility("work hard"); >+ employee.addResponsibility("write code"); >+ employee.addProject(new Project()); >+ employee.setWorkWeek(new HashSet<Employee.Weekdays>()); >+ employee.getWorkWeek().add(Employee.Weekdays.MONDAY); >+ employee.getWorkWeek().add(Employee.Weekdays.TUESDAY); >+ QuerySQLTracker counter = new QuerySQLTracker(getServerSession()); >+ EntityManager em = createEntityManager(); >+ try { >+ beginTransaction(em); >+ em.persist(employee); >+ commitTransaction(em); >+ closeEntityManager(em); >+ clearCache(); >+ em = createEntityManager(); >+ beginTransaction(em); >+ employee = em.find(Employee.class, employee.getId()); >+ counter.getSqlStatements().clear(); >+ em.remove(employee); >+ commitTransaction(em); >+ if (isWeavingEnabled() && counter.getSqlStatements().size() > 13) { >+ fail("Only 13 sql statements should have occured:" + counter.getSqlStatements().size()); >+ } >+ beginTransaction(em); >+ verifyDelete(employee); >+ commitTransaction(em); >+ } finally { >+ if 
(isTransactionActive(em)) { >+ rollbackTransaction(em); >+ } >+ closeEntityManager(em); >+ } >+ } >+ >+ // Test that deleting a Man works correctly. >+ public void testDeleteMan() { >+ EntityManager em = createEntityManager(); >+ QuerySQLTracker counter = null; >+ try { >+ beginTransaction(em); >+ Man man = new Man(); >+ em.persist(man); >+ Woman woman = new Woman(); >+ em.persist(woman); >+ PartnerLink link = new PartnerLink(); >+ em.persist(link); >+ man.setPartnerLink(link); >+ link.setMan(man); >+ woman.setPartnerLink(link); >+ link.setWoman(woman); >+ commitTransaction(em); >+ closeEntityManager(em); >+ clearCache(); >+ counter = new QuerySQLTracker(getServerSession()); >+ em = createEntityManager(); >+ beginTransaction(em); >+ // Count SQL. >+ man = em.find(Man.class, man.getId()); >+ woman = em.find(Woman.class, woman.getId()); >+ woman.setPartnerLink(null); >+ counter.getSqlStatements().clear(); >+ em.remove(man); >+ commitTransaction(em); >+ if (counter.getSqlStatements().size() > 2) { >+ fail("Only 2 delete should have occured."); >+ } >+ beginTransaction(em); >+ verifyDelete(man); >+ commitTransaction(em); >+ } finally { >+ if (counter != null) { >+ counter.remove(); >+ } >+ if (isTransactionActive(em)) { >+ rollbackTransaction(em); >+ } >+ closeEntityManager(em); >+ } >+ } >+ > // test for bug 4681287: > // CTS: EXCEPTION EXPECTED ON FIND() IF PK PASSED IN != ATTRIBUTE TYPE > public void testFindWithWrongTypePk() { >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/cacheable/CacheableModelJunitTestNone.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/cacheable/CacheableModelJunitTestNone.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/cacheable/CacheableModelJunitTestNone.java (working copy) >@@ -17,34 +17,17 @@ > 
******************************************************************************/ > package org.eclipse.persistence.testing.tests.jpa.cacheable; > >-import java.util.HashMap; >- >-import javax.persistence.CacheRetrieveMode; >-import javax.persistence.CacheStoreMode; >-import javax.persistence.EntityManager; >-import javax.persistence.Query; >- > import junit.framework.*; > >-import org.eclipse.persistence.config.CacheUsage; >-import org.eclipse.persistence.config.QueryHints; > import org.eclipse.persistence.descriptors.ClassDescriptor; > import org.eclipse.persistence.sessions.server.ServerSession; > import org.eclipse.persistence.testing.framework.junit.JUnitTestCase; > import org.eclipse.persistence.testing.models.jpa.cacheable.CacheableTableCreator; >-import org.eclipse.persistence.testing.models.jpa.cacheable.CacheableTrueEntity; >-import org.eclipse.persistence.testing.models.jpa.cacheable.CacheableFalseEntity; >-import org.eclipse.persistence.testing.models.jpa.cacheable.ChildCacheableFalseEntity; >-import org.eclipse.persistence.testing.models.jpa.cacheable.SubCacheableFalseEntity; >-import org.eclipse.persistence.testing.models.jpa.cacheable.SubCacheableNoneEntity; > > /* > * The test is testing against "MulitPU-2" persistence unit which has <shared-cache-mode> to be NONE > */ > public class CacheableModelJunitTestNone extends JUnitTestCase { >- private static int m_cacheableTrueEntity1Id; >- private static int m_cacheableTrueEntity2Id; >- private static int m_childCacheableFalseEntityId; > > public CacheableModelJunitTestNone() { > super(); >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/cacheable/CacheableModelJunitTestUnspecified.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/cacheable/CacheableModelJunitTestUnspecified.java (revision 8109) >+++ 
jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/cacheable/CacheableModelJunitTestUnspecified.java (working copy) >@@ -17,34 +17,17 @@ > ******************************************************************************/ > package org.eclipse.persistence.testing.tests.jpa.cacheable; > >-import java.util.HashMap; >- >-import javax.persistence.CacheRetrieveMode; >-import javax.persistence.CacheStoreMode; >-import javax.persistence.EntityManager; >-import javax.persistence.Query; >- > import junit.framework.*; > >-import org.eclipse.persistence.config.CacheUsage; >-import org.eclipse.persistence.config.QueryHints; > import org.eclipse.persistence.descriptors.ClassDescriptor; > import org.eclipse.persistence.sessions.server.ServerSession; > import org.eclipse.persistence.testing.framework.junit.JUnitTestCase; > import org.eclipse.persistence.testing.models.jpa.cacheable.CacheableTableCreator; >-import org.eclipse.persistence.testing.models.jpa.cacheable.CacheableTrueEntity; >-import org.eclipse.persistence.testing.models.jpa.cacheable.CacheableFalseEntity; >-import org.eclipse.persistence.testing.models.jpa.cacheable.ChildCacheableFalseEntity; >-import org.eclipse.persistence.testing.models.jpa.cacheable.SubCacheableFalseEntity; >-import org.eclipse.persistence.testing.models.jpa.cacheable.SubCacheableNoneEntity; > > /* > * The test is testing against "MulitPU-5" persistence unit which has <shared-cache-mode> to be UNSPECIFIED > */ > public class CacheableModelJunitTestUnspecified extends JUnitTestCase { >- private static int m_cacheableTrueEntity1Id; >- private static int m_cacheableTrueEntity2Id; >- private static int m_childCacheableFalseEntityId; > > public CacheableModelJunitTestUnspecified() { > super(); >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/cascadedeletes/CascadeDeletesJUnitTestSuite.java >=================================================================== >--- 
jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/cascadedeletes/CascadeDeletesJUnitTestSuite.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/cascadedeletes/CascadeDeletesJUnitTestSuite.java (working copy) >@@ -32,7 +32,6 @@ > import org.eclipse.persistence.testing.models.jpa.cascadedeletes.ThreadInfo; > > public class CascadeDeletesJUnitTestSuite extends JUnitTestCase { >- private static Integer m_Id; > > public CascadeDeletesJUnitTestSuite() { > super(); >@@ -94,6 +93,10 @@ > try { > beginTransaction(em); > rootA = em.find(RootA.class, rootA.getId()); >+ // Force the lazy leaf collections to instantiate before the remove so the cascade can be applied in memory. >+ for (BranchA a : rootA.getBranchAs()) { >+ a.getLeafs().size(); >+ } > em.remove(rootA); > for (PersistentIdentity entity : allEntities) { > assertNull("Contains found removed entity", em.find(entity.getClass(), entity.getId())); >Index: jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/fieldaccess/advanced/EntityManagerJUnitTestSuite.java >=================================================================== >--- jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/fieldaccess/advanced/EntityManagerJUnitTestSuite.java (revision 8109) >+++ jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/tests/jpa/fieldaccess/advanced/EntityManagerJUnitTestSuite.java (working copy) >@@ -23,6 +23,7 @@ > import java.sql.Date; > import java.util.ArrayList; > import java.util.HashMap; >+import java.util.HashSet; > import java.util.List; > import java.util.Collection; > import java.util.Vector; >@@ -77,6 +78,7 @@ > import org.eclipse.persistence.sequencing.Sequence; > import org.eclipse.persistence.logging.SessionLogEntry; > >+import org.eclipse.persistence.testing.framework.QuerySQLTracker; > import org.eclipse.persistence.testing.framework.junit.JUnitTestCase; > import org.eclipse.persistence.testing.framework.junit.JUnitTestCaseHelper; > import 
org.eclipse.persistence.testing.models.jpa.fieldaccess.advanced.*; >@@ -211,6 +213,8 @@ > suite.addTest(new EntityManagerJUnitTestSuite("testCopyingAddress")); > suite.addTest(new EntityManagerJUnitTestSuite("testSequencePreallocationUsingCallbackTest")); > suite.addTest(new EntityManagerJUnitTestSuite("updateAttributeWithObjectTest")); >+ suite.addTest(new EntityManagerJUnitTestSuite("testDeleteEmployee")); >+ suite.addTest(new EntityManagerJUnitTestSuite("testDeleteMan")); > > return suite; > } >@@ -3566,6 +3570,46 @@ > } > } > >+ // Test that deleting an employee works correctly. >+ public void testDeleteEmployee() { >+ Employee employee = new Employee(); >+ employee.addPhoneNumber(new PhoneNumber("home", "123", "4567")); >+ employee.addPhoneNumber(new PhoneNumber("fax", "456", "4567")); >+ employee.addResponsibility("work hard"); >+ employee.addResponsibility("write code"); >+ employee.addProject(new Project()); >+ employee.setWorkWeek(new HashSet<Employee.Weekdays>()); >+ employee.getWorkWeek().add(Employee.Weekdays.MONDAY); >+ employee.getWorkWeek().add(Employee.Weekdays.TUESDAY); >+ EntityManager em = createEntityManager("fieldaccess"); >+ QuerySQLTracker counter = null; >+ try { >+ beginTransaction(em); >+ em.persist(employee); >+ commitTransaction(em); >+ closeEntityManager(em); >+ clearCache("fieldaccess"); >+ counter = new QuerySQLTracker(getServerSession("fieldaccess")); >+ em = createEntityManager("fieldaccess"); >+ beginTransaction(em); >+ employee = em.find(Employee.class, employee.getId()); >+ counter.getSqlStatements().clear(); >+ em.remove(employee); >+ if (isWeavingEnabled() && counter.getSqlStatements().size() > 3) { >+ fail("Only 2 delete and 1 select should have occured."); >+ } >+ commitTransaction(em); >+ beginTransaction(em); >+ verifyDelete(employee, "fieldaccess"); >+ commitTransaction(em); >+ } finally { >+ if (isTransactionActive(em)) { >+ rollbackTransaction(em); >+ } >+ closeEntityManager(em); >+ } >+ } >+ > //bug gf674 - EJBQL delete 
query with IS NULL in WHERE clause produces wrong sql > public void testDeleteAllPhonesWithNullOwner() { > EntityManager em = createEntityManager("fieldaccess"); >@@ -4724,5 +4768,50 @@ > event.updateAttributeWithObject("period", period); > } > } >+ >+ // Test that deleting a Man works correctly. >+ public void testDeleteMan() { >+ EntityManager em = createEntityManager("fieldaccess"); >+ QuerySQLTracker counter = null; >+ try { >+ beginTransaction(em); >+ Man man = new Man(); >+ em.persist(man); >+ Woman woman = new Woman(); >+ em.persist(woman); >+ PartnerLink link = new PartnerLink(); >+ man.setPartnerLink(link); >+ link.setMan(man); >+ woman.setPartnerLink(link); >+ link.setWoman(woman); >+ commitTransaction(em); >+ closeEntityManager(em); >+ clearCache("fieldaccess"); >+ counter = new QuerySQLTracker(getServerSession("fieldaccess")); >+ em = createEntityManager("fieldaccess"); >+ beginTransaction(em); >+ // Count SQL. >+ man = em.find(Man.class, man.getId()); >+ woman = em.find(Woman.class, woman.getId()); >+ woman.setPartnerLink(null); >+ counter.getSqlStatements().clear(); >+ em.remove(man); >+ commitTransaction(em); >+ if (counter.getSqlStatements().size() > 1) { >+ fail("Only one delete should have occured."); >+ } >+ beginTransaction(em); >+ verifyDelete(man, "fieldaccess"); >+ commitTransaction(em); >+ } finally { >+ if (counter != null) { >+ counter.remove(); >+ } >+ if (isTransactionActive(em)) { >+ rollbackTransaction(em); >+ } >+ closeEntityManager(em); >+ } >+ } > > } >Index: jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/EntityManagerImpl.java >=================================================================== >--- jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/EntityManagerImpl.java (revision 8109) >+++ jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/EntityManagerImpl.java (working copy) >@@ -51,6 +51,7 @@ > import org.eclipse.persistence.sessions.*; > import 
org.eclipse.persistence.sessions.factories.ReferenceMode; > import org.eclipse.persistence.sessions.factories.SessionManager; >+import org.eclipse.persistence.sessions.server.ClientSession; > import org.eclipse.persistence.sessions.server.ConnectionPolicy; > import org.eclipse.persistence.sessions.server.Server; > import org.eclipse.persistence.sessions.server.ServerSession; >@@ -1570,7 +1571,8 @@ > if(this.connectionPolicy == null) { > createConnectionPolicy(); > } >- this.extendedPersistenceContext = new RepeatableWriteUnitOfWork(this.serverSession.acquireClientSession(connectionPolicy, properties), this.referenceMode); >+ ClientSession client = this.serverSession.acquireClientSession(connectionPolicy, properties); >+ this.extendedPersistenceContext = new RepeatableWriteUnitOfWork(client, this.referenceMode); > this.extendedPersistenceContext.setResumeUnitOfWorkOnTransactionCompletion(!this.closeOnCommit); > this.extendedPersistenceContext.setShouldDiscoverNewObjects(this.persistOnCommit); > this.extendedPersistenceContext.setDiscoverUnregisteredNewObjectsWithoutPersist(this.commitWithoutPersistRules); >@@ -1584,6 +1586,9 @@ > // creation of PC > transaction.registerUnitOfWorkWithTxn(this.extendedPersistenceContext); > } >+ if (client.shouldLog(SessionLog.FINER, SessionLog.TRANSACTION)) { >+ client.log(SessionLog.FINER, SessionLog.TRANSACTION, "acquire_unit_of_work_with_argument", String.valueOf(System.identityHashCode(this.extendedPersistenceContext))); >+ } > } > if (this.beginEarlyTransaction && txn != null && !this.extendedPersistenceContext.isInTransaction()) { > // gf3334, force persistence context early transaction >Index: jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/classes/EntityAccessor.java >=================================================================== >--- jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/classes/EntityAccessor.java (revision 8117) >+++ 
jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/classes/EntityAccessor.java (working copy) >@@ -91,6 +91,7 @@ > import javax.persistence.SecondaryTables; > import javax.persistence.Table; > >+import org.eclipse.persistence.annotations.CascadeOnDelete; > import org.eclipse.persistence.annotations.ClassExtractor; > import org.eclipse.persistence.annotations.Index; > import org.eclipse.persistence.annotations.Indexes; >@@ -147,6 +148,8 @@ > > private TableMetadata m_table; > >+ private boolean m_cascadeOnDelete; >+ > /** > * INTERNAL: > */ >@@ -693,7 +696,8 @@ > > // Process the Table and Inheritance metadata. > processTableAndInheritance(); >- processIndexes(); >+ processIndexes(); >+ processCascadeOnDelete(); > > // Process the common class level attributes that an entity or mapped > // superclass may define. This needs to be done before processing >@@ -1218,6 +1222,21 @@ > > /** > * INTERNAL: >+ * Check if CascadeOnDelete was set on the Entity. >+ */ >+ protected void processCascadeOnDelete() { >+ MetadataAnnotation annotation = getAnnotation(CascadeOnDelete.class); >+ >+ if (annotation != null) { >+ m_cascadeOnDelete = true; >+ } >+ if (m_cascadeOnDelete) { >+ getDescriptor().getClassDescriptor().setIsCascadeOnDeleteSetOnDatabaseOnSecondaryTables(true); >+ } >+ } >+ >+ /** >+ * INTERNAL: > * Process index information for the given metadata descriptor. 
> */ > protected void processIndexes() { >@@ -1266,8 +1285,10 @@ > getDescriptor().getPrimaryTable().getIndexes().add(indexDefinition); > } else if (table.equals(getDescriptor().getPrimaryTable().getQualifiedName()) > || table.equals(getDescriptor().getPrimaryTable().getName())) { >+ indexDefinition.setTargetTable(table); > getDescriptor().getPrimaryTable().getIndexes().add(indexDefinition); > } else { >+ indexDefinition.setTargetTable(table); > boolean found = false; > for (DatabaseTable databaseTable : getDescriptor().getClassDescriptor().getTables()) { > if (table.equals(databaseTable.getQualifiedName()) || table.equals(databaseTable.getName())) { >@@ -1426,6 +1447,14 @@ > m_table = table; > } > >+ public boolean isCascadeOnDelete() { >+ return m_cascadeOnDelete; >+ } >+ >+ public void setCascadeOnDelete(boolean cascadeOnDelete) { >+ m_cascadeOnDelete = cascadeOnDelete; >+ } >+ > /** > * INTERNAL: > * Validate a OptimisticLocking(type=VERSION_COLUMN) setting. That is, >Index: jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/CollectionAccessor.java >=================================================================== >--- jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/CollectionAccessor.java (revision 8109) >+++ jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/CollectionAccessor.java (working copy) >@@ -573,32 +573,12 @@ > } > > /** >- * INTERNAL: >+ * Configure the CollectionMapping properties based on the metadata. > */ > protected void process(CollectionMapping mapping) { >- // Set the mapping, this must be done first. >- setMapping(mapping); >+ // Process common properties first. 
>+ processRelationshipMapping(mapping); > >- mapping.setIsReadOnly(false); >- mapping.setIsLazy(isLazy()); >- mapping.setAttributeName(getAttributeName()); >- mapping.setReferenceClassName(getReferenceClassName()); >- >- // Process join fetch type. >- processJoinFetch(getJoinFetch(), mapping); >- >- // Process the batch fetch if specified. >- processBatchFetch(getBatchFetch(), mapping); >- >- // Process the orphanRemoval or PrivateOwned >- processOrphanRemoval(mapping); >- >- // Will check for PROPERTY access >- setAccessorMethods(mapping); >- >- // Process the cascade types. >- processCascadeTypes(mapping); >- > // Process an OrderBy if there is one. > processOrderBy(mapping); > >@@ -615,7 +595,7 @@ > if (m_orderColumn != null) { > m_orderColumn.process(mapping, getDescriptor()); > } >- } >+ } > > /** > * INTERNAL: >Index: jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/DirectCollectionAccessor.java >=================================================================== >--- jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/DirectCollectionAccessor.java (revision 8117) >+++ jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/DirectCollectionAccessor.java (working copy) >@@ -32,6 +32,7 @@ > import javax.persistence.FetchType; > > import org.eclipse.persistence.annotations.BatchFetch; >+import org.eclipse.persistence.annotations.CascadeOnDelete; > import org.eclipse.persistence.annotations.JoinFetch; > import org.eclipse.persistence.internal.helper.DatabaseTable; > import org.eclipse.persistence.internal.jpa.metadata.MetadataDescriptor; >@@ -60,6 +61,7 @@ > private String m_joinFetch; > private String m_batchFetch; > private Integer m_batchFetchSize; >+ private boolean m_cascadeOnDelete; > private CollectionTableMetadata m_collectionTable; > > /** >@@ -95,11 +97,21 @@ > m_batchFetchSize = (Integer) 
batchFetch.getAttribute("size"); > } > >+ m_cascadeOnDelete = isAnnotationPresent(CascadeOnDelete.class); >+ > // Since BasicCollection and ElementCollection look for different > // collection tables, we will not initialize/look for one here. Those > // accessors will be responsible for loading their collection table. > } > >+ public boolean isCascadeOnDelete() { >+ return m_cascadeOnDelete; >+ } >+ >+ public void setCascadeOnDelete(boolean cascadeOnDelete) { >+ m_cascadeOnDelete = cascadeOnDelete; >+ } >+ > /** > * INTERNAL: > */ >@@ -304,6 +316,8 @@ > > // The spec. requires pessimistic lock to be extend-able to CollectionTable > mapping.setShouldExtendPessimisticLockScope(true); >+ >+ mapping.setIsCascadeOnDeleteSetOnDatabase(isCascadeOnDelete()); > } > > /** >Index: jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/EmbeddedAccessor.java >=================================================================== >--- jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/EmbeddedAccessor.java (revision 8109) >+++ jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/EmbeddedAccessor.java (working copy) >@@ -190,7 +190,6 @@ > AggregateObjectMapping mapping = new AggregateObjectMapping(); > setMapping(mapping); > >- mapping.setIsReadOnly(false); > mapping.setIsNullAllowed(true); > mapping.setReferenceClassName(getReferenceClassName()); > mapping.setAttributeName(getAttributeName()); >Index: jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/ManyToOneAccessor.java >=================================================================== >--- jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/ManyToOneAccessor.java (revision 8109) >+++ 
jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/ManyToOneAccessor.java (working copy) >@@ -105,7 +105,6 @@ > > // Initialize our mapping now with what we found. > ManyToOneMapping mapping = initManyToOneMapping(); >- setMapping(mapping); > > // Process the owning keys for this mapping. > processOwningMappingKeys(mapping); >Index: jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/ObjectAccessor.java >=================================================================== >--- jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/ObjectAccessor.java (revision 8109) >+++ jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/ObjectAccessor.java (working copy) >@@ -302,30 +302,14 @@ > protected OneToOneMapping initOneToOneMapping() { > OneToOneMapping mapping = new OneToOneMapping(); > mapping.setIsOneToOneRelationship(true); >- mapping.setIsReadOnly(false); >+ processRelationshipMapping(mapping); >+ > mapping.setIsOptional(isOptional()); >- mapping.setAttributeName(getAttributeName()); >- mapping.setReferenceClassName(getReferenceClassName()); > mapping.setDerivesId(derivesId()); > >- // Process join fetch type. >- processJoinFetch(getJoinFetch(), mapping); >- >- // Process the batch fetch if specified. >- processBatchFetch(getBatchFetch(), mapping); >- >- // Process the orphanRemoval or PrivateOwned >- processOrphanRemoval(mapping); >- > // Process the indirection. > processIndirection(mapping); > >- // Set the getter and setter methods if access is PROPERTY. >- setAccessorMethods(mapping); >- >- // Process the cascade types. 
>- processCascadeTypes(mapping); >- > // Process a @ReturnInsert and @ReturnUpdate (to log a warning message) > processReturnInsertAndUpdate(); > >@@ -338,30 +322,14 @@ > */ > protected ManyToOneMapping initManyToOneMapping() { > ManyToOneMapping mapping = new ManyToOneMapping(); >- mapping.setIsReadOnly(false); >+ processRelationshipMapping(mapping); >+ > mapping.setIsOptional(isOptional()); >- mapping.setAttributeName(getAttributeName()); >- mapping.setReferenceClassName(getReferenceClassName()); > mapping.setDerivesId(derivesId()); > >- // Process join fetch type. >- processJoinFetch(getJoinFetch(), mapping); >- >- // Process the batch fetch if specified. >- processBatchFetch(getBatchFetch(), mapping); >- >- // Process the orphanRemoval or PrivateOwned >- processOrphanRemoval(mapping); >- > // Process the indirection. > processIndirection(mapping); > >- // Set the getter and setter methods if access is PROPERTY. >- setAccessorMethods(mapping); >- >- // Process the cascade types. >- processCascadeTypes(mapping); >- > // Process a @ReturnInsert and @ReturnUpdate (to log a warning message) > processReturnInsertAndUpdate(); > >@@ -512,8 +480,6 @@ > } else { > mapping.setUsesIndirection(usesIndirection); > } >- >- mapping.setIsLazy(isLazy()); > } > > /** >Index: jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/OneToOneAccessor.java >=================================================================== >--- jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/OneToOneAccessor.java (revision 8109) >+++ jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/OneToOneAccessor.java (working copy) >@@ -100,7 +100,6 @@ > > // Initialize our mapping now with what we found. > OneToOneMapping mapping = initOneToOneMapping(); >- setMapping(mapping); > > if (hasMappedBy()) { > // Non-owning side, process the foreign keys from the owner. 
>Index: jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/RelationshipAccessor.java >=================================================================== >--- jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/RelationshipAccessor.java (revision 8117) >+++ jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/RelationshipAccessor.java (working copy) >@@ -57,6 +57,7 @@ > import org.eclipse.persistence.mappings.RelationTableMechanism; > > import org.eclipse.persistence.annotations.BatchFetch; >+import org.eclipse.persistence.annotations.CascadeOnDelete; > import org.eclipse.persistence.annotations.JoinFetch; > import org.eclipse.persistence.annotations.PrivateOwned; > import org.eclipse.persistence.exceptions.ValidationException; >@@ -105,6 +106,8 @@ > > private String m_targetEntityName; > >+ private boolean m_cascadeOnDelete; >+ > /** > * INTERNAL: > */ >@@ -140,6 +143,8 @@ > // Set the private owned if one is present. > m_privateOwned = isAnnotationPresent(PrivateOwned.class); > >+ m_cascadeOnDelete = isAnnotationPresent(CascadeOnDelete.class); >+ > // Set the join columns if some are present. > // Process all the join columns first. > MetadataAnnotation joinColumns = getAnnotation(JoinColumns.class); >@@ -675,6 +680,34 @@ > } > > /** >+ * Process settings common to ForeignReferenceMapping. >+ */ >+ protected void processRelationshipMapping(ForeignReferenceMapping mapping) { >+ // Set the mapping, this must be done first. >+ setMapping(mapping); >+ >+ mapping.setIsLazy(isLazy()); >+ mapping.setAttributeName(getAttributeName()); >+ mapping.setReferenceClassName(getReferenceClassName()); >+ mapping.setIsCascadeOnDeleteSetOnDatabase(isCascadeOnDelete()); >+ >+ // Process join fetch type. >+ processJoinFetch(getJoinFetch(), mapping); >+ >+ // Process the batch fetch if specified. 
>+ processBatchFetch(getBatchFetch(), mapping); >+ >+ // Process the orphanRemoval or PrivateOwned >+ processOrphanRemoval(mapping); >+ >+ // Will check for PROPERTY access >+ setAccessorMethods(mapping); >+ >+ // Process the cascade types. >+ processCascadeTypes(mapping); >+ } >+ >+ /** > * INTERNAL: > * Set the getter and setter access methods for this accessor. > */ >@@ -806,6 +839,14 @@ > m_targetEntityName = targetEntityName; > } > >+ public boolean isCascadeOnDelete() { >+ return m_cascadeOnDelete; >+ } >+ >+ public void setCascadeOnDelete(boolean cascadeOnDelete) { >+ m_cascadeOnDelete = cascadeOnDelete; >+ } >+ > /** > * INTERNAL: > */ >Index: jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/VariableOneToOneAccessor.java >=================================================================== >--- jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/VariableOneToOneAccessor.java (revision 8109) >+++ jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/metadata/accessors/mappings/VariableOneToOneAccessor.java (working copy) >@@ -238,25 +238,13 @@ > > // Now process our variable one to one mapping. > VariableOneToOneMapping mapping = new VariableOneToOneMapping(); >- setMapping(mapping); >+ processRelationshipMapping(mapping); > >- mapping.setIsReadOnly(false); > mapping.setIsOptional(isOptional()); >- mapping.setAttributeName(getAttributeName()); >- mapping.setReferenceClassName(getReferenceClassName()); > >- // Process the orphanRemoval or PrivateOwned >- processOrphanRemoval(mapping); >- > // Process the indirection. > processIndirection(mapping); > >- // Set the getter and setter methods if access is PROPERTY. >- setAccessorMethods(mapping); >- >- // Process the cascade types. 
>- processCascadeTypes(mapping); >- > // Process a @ReturnInsert and @ReturnUpdate (to log a warning message) > processReturnInsertAndUpdate(); > >Index: moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingByteArrayTestCases.java >=================================================================== >--- moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingByteArrayTestCases.java (revision 8109) >+++ moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingByteArrayTestCases.java (working copy) >@@ -15,7 +15,7 @@ > import java.io.InputStream; > import java.util.HashMap; > import java.util.Map; >-import java.util.Vector; >+import java.util.List; > > import javax.xml.bind.JAXBElement; > import javax.xml.namespace.QName; >@@ -90,7 +90,7 @@ > } > > public void testDescriptorsSize(){ >- Vector descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); >+ List descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); > assertEquals(1, descriptors.size()); > } > } >\ No newline at end of file >Index: moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingClassAndAdapterClassTestCases.java >=================================================================== >--- moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingClassAndAdapterClassTestCases.java (revision 8109) >+++ moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingClassAndAdapterClassTestCases.java (working copy) >@@ -18,7 +18,7 @@ > import java.util.HashMap; > import java.util.List; > import java.util.Map; >-import java.util.Vector; >+import 
java.util.List; > > import javax.xml.bind.JAXBElement; > import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; >@@ -133,7 +133,7 @@ > } > > public void testDescriptorsSize(){ >- Vector descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); >+ List descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); > assertEquals(1, descriptors.size()); > } > } >Index: moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingCollectionTestCases.java >=================================================================== >--- moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingCollectionTestCases.java (revision 8109) >+++ moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingCollectionTestCases.java (working copy) >@@ -17,7 +17,7 @@ > import java.util.Collection; > import java.util.HashMap; > import java.util.Map; >-import java.util.Vector; >+import java.util.List; > > import javax.xml.bind.JAXBElement; > import javax.xml.namespace.QName; >@@ -121,7 +121,7 @@ > } > > public void testDescriptorsSize(){ >- Vector descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); >+ List descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); > assertEquals(1, descriptors.size()); > } > >Index: moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingListClassAndTypeTestCases.java >=================================================================== >--- 
moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingListClassAndTypeTestCases.java (revision 8109) >+++ moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingListClassAndTypeTestCases.java (working copy) >@@ -17,7 +17,7 @@ > import java.util.HashMap; > import java.util.List; > import java.util.Map; >-import java.util.Vector; >+import java.util.List; > > import javax.xml.bind.JAXBElement; > import javax.xml.namespace.QName; >@@ -103,7 +103,7 @@ > } > > public void testDescriptorsSize(){ >- Vector descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); >+ List descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); > assertEquals(2, descriptors.size()); > } > >Index: moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingListObjectsTypeTestCases.java >=================================================================== >--- moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingListObjectsTypeTestCases.java (revision 8109) >+++ moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingListObjectsTypeTestCases.java (working copy) >@@ -17,7 +17,7 @@ > import java.util.HashMap; > import java.util.List; > import java.util.Map; >-import java.util.Vector; >+import java.util.List; > > import javax.xml.bind.JAXBElement; > import javax.xml.namespace.QName; >@@ -102,7 +102,7 @@ > } > > public void testDescriptorsSize(){ >- Vector descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); >+ List descriptors = 
((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); > assertEquals(2, descriptors.size()); > } > >Index: moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingListTypeTestCases.java >=================================================================== >--- moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingListTypeTestCases.java (revision 8109) >+++ moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingListTypeTestCases.java (working copy) >@@ -18,7 +18,7 @@ > import java.util.HashMap; > import java.util.List; > import java.util.Map; >-import java.util.Vector; >+import java.util.List; > > import javax.xml.bind.JAXBElement; > import javax.xml.bind.annotation.XmlList; >@@ -135,7 +135,7 @@ > } > > public void testDescriptorsSize(){ >- Vector descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); >+ List descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); > assertEquals(4, descriptors.size()); > } > >Index: moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingMapsTestCases.java >=================================================================== >--- moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingMapsTestCases.java (revision 8109) >+++ moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingMapsTestCases.java (working copy) >@@ -15,7 +15,7 @@ > import java.io.InputStream; > import java.util.HashMap; > import java.util.Map; >-import java.util.Vector; >+import java.util.List; > > import javax.xml.bind.JAXBElement; > 
import javax.xml.namespace.QName; >@@ -120,7 +120,7 @@ > } > > public void testDescriptorsSize(){ >- Vector descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); >+ List descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); > assertEquals(10, descriptors.size()); > } > } >Index: moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingStringArrayClassesTestCases.java >=================================================================== >--- moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingStringArrayClassesTestCases.java (revision 8109) >+++ moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingStringArrayClassesTestCases.java (working copy) >@@ -15,7 +15,7 @@ > import java.io.InputStream; > import java.util.HashMap; > import java.util.Map; >-import java.util.Vector; >+import java.util.List; > > import javax.xml.bind.JAXBElement; > import javax.xml.namespace.QName; >@@ -90,7 +90,7 @@ > } > > public void testDescriptorsSize(){ >- Vector descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); >+ List descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); > assertEquals(1, descriptors.size()); > } > } >Index: moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingStringClassTestCases.java >=================================================================== >--- moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingStringClassTestCases.java (revision 8109) >+++ 
moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/ConflictingStringClassTestCases.java (working copy) >@@ -15,7 +15,7 @@ > import java.io.InputStream; > import java.util.HashMap; > import java.util.Map; >-import java.util.Vector; >+import java.util.List; > > import javax.xml.bind.JAXBElement; > import javax.xml.namespace.QName; >@@ -82,7 +82,7 @@ > } > > public void testDescriptorsSize(){ >- Vector descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); >+ List descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); > assertEquals(1, descriptors.size()); > } > >Index: moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/NonConflictingListClassAndTypeTestCases.java >=================================================================== >--- moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/NonConflictingListClassAndTypeTestCases.java (revision 8109) >+++ moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/typemappinginfo/collisions/NonConflictingListClassAndTypeTestCases.java (working copy) >@@ -17,7 +17,7 @@ > import java.util.HashMap; > import java.util.List; > import java.util.Map; >-import java.util.Vector; >+import java.util.List; > > import javax.xml.bind.JAXBElement; > import javax.xml.namespace.QName; >@@ -87,7 +87,7 @@ > } > > public void testDescriptorsSize(){ >- Vector descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); >+ List descriptors = ((org.eclipse.persistence.jaxb.JAXBContext)jaxbContext).getXMLContext().getSession(0).getProject().getOrderedDescriptors(); > assertEquals(2, descriptors.size()); > } > } >Index: 
moxy/org.eclipse.persistence.moxy/src/org/eclipse/persistence/jaxb/JAXBContext.java >=================================================================== >--- moxy/org.eclipse.persistence.moxy/src/org/eclipse/persistence/jaxb/JAXBContext.java (revision 8109) >+++ moxy/org.eclipse.persistence.moxy/src/org/eclipse/persistence/jaxb/JAXBContext.java (working copy) >@@ -20,7 +20,6 @@ > import java.util.Iterator; > import java.util.List; > import java.util.Map; >-import java.util.Vector; > import java.util.Map.Entry; > > import javax.xml.bind.SchemaOutputResolver; >@@ -167,7 +166,7 @@ > List<XMLDescriptor> descriptorsToProcess = new ArrayList<XMLDescriptor>(); > List<Session> sessions = xmlContext.getSessions(); > for (Session session : sessions) { >- Vector<XMLDescriptor> descriptors = session.getProject().getOrderedDescriptors(); >+ List<XMLDescriptor> descriptors = (List<XMLDescriptor>)(List)session.getProject().getOrderedDescriptors(); > for (XMLDescriptor xDesc : descriptors) { > descriptorsToProcess.add(xDesc); > }
You cannot view the attachment while viewing its details because your browser does not support IFRAMEs.
View the attachment on a separate page
.
View Attachment As Diff
View Attachment As Raw
Actions:
View
|
Diff
Attachments on
bug 324341
: 178847