You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@openjpa.apache.org by pc...@apache.org on 2006/07/19 23:35:07 UTC
svn commit: r423615 [28/44] - in /incubator/openjpa/trunk: ./
openjpa-jdbc-5/ openjpa-jdbc-5/src/ openjpa-jdbc-5/src/main/
openjpa-jdbc-5/src/main/java/ openjpa-jdbc-5/src/main/java/org/
openjpa-jdbc-5/src/main/java/org/apache/ openjpa-jdbc-5/src/main/...
Added: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/Table.java
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/Table.java?rev=423615&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/Table.java (added)
+++ incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/Table.java Wed Jul 19 14:34:44 2006
@@ -0,0 +1,692 @@
+/*
+ * Copyright 2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.openjpa.jdbc.schema;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.commons.collections.map.LinkedMap;
+import org.apache.commons.lang.StringUtils;
+import org.apache.openjpa.lib.meta.SourceTracker;
+import org.apache.openjpa.lib.util.Localizer;
+
+/**
+ * Represents a database table.
+ *
+ * @author Abe White
+ * @author Stephen Kim
+ */
+/**
+ * Represents a database table. Tracks the table's columns, primary key,
+ * foreign keys, indexes, and unique constraints, and keeps its owning
+ * {@link Schema} and {@link SchemaGroup} name registries in sync as
+ * members are added and removed.
+ *
+ * @author Abe White
+ * @author Stephen Kim
+ */
+public class Table
+    extends NameSet
+    implements Comparable, SourceTracker {
+
+    private String _name = null;
+    private String _schemaName = null;
+    // member lookups are case-insensitive; maps are keyed on the
+    // upper-cased member name
+    private Map _colMap = null;
+    private Map _idxMap = null;
+    private Collection _fkList = null;
+    private Collection _unqList = null;
+    private Schema _schema = null;
+    private PrimaryKey _pk = null;
+
+    // keep track of source
+    private File _source = null;
+    private int _srcType = SRC_OTHER;
+
+    // cached derived state; each mutator nulls the affected entry so it
+    // is lazily rebuilt on next access
+    private String _fullName = null;
+    private Column[] _cols = null;
+    private Column[] _autoAssign = null;
+    private Column[] _rels = null;
+    private ForeignKey[] _fks = null;
+    private Index[] _idxs = null;
+    private Unique[] _unqs = null;
+
+    /**
+     * Default constructor.
+     */
+    public Table() {
+    }
+
+    /**
+     * Constructor.
+     *
+     * @param name the table name
+     * @param schema the table schema
+     */
+    public Table(String name, Schema schema) {
+        setName(name);
+        // reserve the table's own name in this NameSet so members
+        // cannot shadow it
+        addName(name, true);
+        if (schema != null)
+            setSchemaName(schema.getName());
+        _schema = schema;
+    }
+
+    /**
+     * Called when the table is removed from its schema. Removes all table
+     * members, and invalidates the table.
+     */
+    void remove() {
+        // remove constraints before columns so each member can
+        // unregister cleanly while its columns still exist
+        ForeignKey[] fks = getForeignKeys();
+        for (int i = 0; i < fks.length; i++)
+            removeForeignKey(fks[i]);
+        Index[] idxs = getIndexes();
+        for (int i = 0; i < idxs.length; i++)
+            removeIndex(idxs[i]);
+        Unique[] unqs = getUniques();
+        for (int i = 0; i < unqs.length; i++)
+            removeUnique(unqs[i]);
+        removePrimaryKey();
+        Column[] cols = getColumns();
+        for (int i = 0; i < cols.length; i++)
+            removeColumn(cols[i]);
+        _schema = null;
+        _schemaName = null;
+        _fullName = null;
+    }
+
+    /**
+     * Return the schema for the table.
+     */
+    public Schema getSchema() {
+        return _schema;
+    }
+
+    /**
+     * The table's schema name.
+     */
+    public String getSchemaName() {
+        return _schemaName;
+    }
+
+    /**
+     * The table's schema name. You can only call this method on tables
+     * whose schema object is not set.
+     *
+     * @throws IllegalStateException if this table belongs to a schema
+     */
+    public void setSchemaName(String name) {
+        if (getSchema() != null)
+            throw new IllegalStateException();
+        _schemaName = name;
+        _fullName = null;
+    }
+
+    /**
+     * Return the name of the table.
+     */
+    public String getName() {
+        return _name;
+    }
+
+    /**
+     * Set the name of the table. This method can only be called on tables
+     * that are not part of a schema.
+     *
+     * @throws IllegalStateException if this table belongs to a schema
+     */
+    public void setName(String name) {
+        if (getSchema() != null)
+            throw new IllegalStateException();
+        _name = name;
+        _fullName = null;
+    }
+
+    /**
+     * Return the table name, including schema, using '.' as the
+     * catalog separator.
+     */
+    public String getFullName() {
+        if (_fullName == null) {
+            Schema schema = getSchema();
+            if (schema == null || schema.getName() == null)
+                _fullName = getName();
+            else
+                _fullName = schema.getName() + "." + getName();
+        }
+        return _fullName;
+    }
+
+    public File getSourceFile() {
+        return _source;
+    }
+
+    public Object getSourceScope() {
+        return null;
+    }
+
+    public int getSourceType() {
+        return _srcType;
+    }
+
+    public void setSource(File source, int srcType) {
+        _source = source;
+        _srcType = srcType;
+    }
+
+    public String getResourceName() {
+        return getFullName();
+    }
+
+    /**
+     * Return the table's columns, in alphabetical order.
+     */
+    public Column[] getColumns() {
+        if (_cols == null) {
+            if (_colMap == null)
+                _cols = Schemas.EMPTY_COLUMNS;
+            else {
+                Column[] cols = new Column[_colMap.size()];
+                Iterator itr = _colMap.values().iterator();
+                for (int i = 0; itr.hasNext(); i++) {
+                    cols[i] = (Column) itr.next();
+                    // record each column's position so Column.getIndex()
+                    // reflects the current ordering
+                    cols[i].setIndex(i);
+                }
+                _cols = cols;
+            }
+        }
+        return _cols;
+    }
+
+    /**
+     * Return this table's auto-assigned columns.
+     */
+    public Column[] getAutoAssignedColumns() {
+        if (_autoAssign == null) {
+            if (_colMap == null)
+                _autoAssign = Schemas.EMPTY_COLUMNS;
+            else {
+                Collection autos = null;
+                Column[] cols = getColumns();
+                for (int i = 0; i < cols.length; i++) {
+                    if (cols[i].isAutoAssigned()) {
+                        if (autos == null)
+                            autos = new ArrayList(3);
+                        autos.add(cols[i]);
+                    }
+                }
+                _autoAssign = (autos == null) ? Schemas.EMPTY_COLUMNS
+                    : (Column[]) autos.toArray(new Column[autos.size()]);
+            }
+        }
+        return _autoAssign;
+    }
+
+    /**
+     * Return this table's relation id columns.
+     */
+    public Column[] getRelationIdColumns() {
+        if (_rels == null) {
+            if (_colMap == null)
+                _rels = Schemas.EMPTY_COLUMNS;
+            else {
+                Collection rels = null;
+                Column[] cols = getColumns();
+                for (int i = 0; i < cols.length; i++) {
+                    if (cols[i].isRelationId()) {
+                        if (rels == null)
+                            rels = new ArrayList(3);
+                        rels.add(cols[i]);
+                    }
+                }
+                _rels = (rels == null) ? Schemas.EMPTY_COLUMNS
+                    : (Column[]) rels.toArray(new Column[rels.size()]);
+            }
+        }
+        return _rels;
+    }
+
+    /**
+     * Return the column with the given name, or null if none.
+     */
+    public Column getColumn(String name) {
+        if (name == null || _colMap == null)
+            return null;
+        // NOTE(review): toUpperCase() is locale-sensitive (e.g. Turkish
+        // dotless i); confirm whether a fixed locale should be used here
+        return (Column) _colMap.get(name.toUpperCase());
+    }
+
+    /**
+     * Add a column to the table.
+     *
+     * @return the new column
+     */
+    public Column addColumn(String name) {
+        // register the column name in this table's NameSet; throws on
+        // duplicates
+        addName(name, true);
+        Schema schema = getSchema();
+        Column col;
+        if (schema != null && schema.getSchemaGroup() != null)
+            col = schema.getSchemaGroup().newColumn(name, this);
+        else
+            col = new Column(name, this);
+        if (_colMap == null)
+            _colMap = new LinkedMap();
+        _colMap.put(name.toUpperCase(), col);
+        _cols = null;
+        return col;
+    }
+
+    /**
+     * Remove the given column from the table.
+     *
+     * @return true if the column was removed, false if not in the table
+     */
+    public boolean removeColumn(Column col) {
+        if (col == null || _colMap == null)
+            return false;
+
+        // only remove if the mapped column is the very one given; a
+        // same-named column from another table must not match
+        Column cur = (Column) _colMap.get(col.getName().toUpperCase());
+        if (!col.equals(cur))
+            return false;
+
+        removeName(col.getName());
+        _colMap.remove(col.getName().toUpperCase());
+        _cols = null;
+        if (col.isAutoAssigned())
+            _autoAssign = null;
+        if (col.isRelationId())
+            _rels = null;
+        col.remove();
+        return true;
+    }
+
+    /**
+     * Import a column from another table. Copies the column's type,
+     * size, and flag information; the copy belongs to this table.
+     */
+    public Column importColumn(Column col) {
+        if (col == null)
+            return null;
+
+        Column copy = addColumn(col.getName());
+        copy.setType(col.getType());
+        copy.setTypeName(col.getTypeName());
+        copy.setJavaType(col.getJavaType());
+        copy.setNotNull(col.isNotNull());
+        copy.setDefaultString(col.getDefaultString());
+        copy.setSize(col.getSize());
+        copy.setDecimalDigits(col.getDecimalDigits());
+        copy.setAutoAssigned(col.isAutoAssigned());
+        return copy;
+    }
+
+    /**
+     * Return the primary key for the table, if any.
+     */
+    public PrimaryKey getPrimaryKey() {
+        return _pk;
+    }
+
+    /**
+     * Set the primary key for the table.
+     */
+    public PrimaryKey addPrimaryKey() {
+        return addPrimaryKey(null);
+    }
+
+    /**
+     * Set the primary key for the table. Replaces any existing primary
+     * key reference without removing it first.
+     */
+    public PrimaryKey addPrimaryKey(String name) {
+        Schema schema = getSchema();
+        if (schema != null && schema.getSchemaGroup() != null) {
+            // pk names are registered group-wide, but non-strictly
+            schema.getSchemaGroup().addName(name, false);
+            _pk = schema.getSchemaGroup().newPrimaryKey(name, this);
+        } else
+            _pk = new PrimaryKey(name, this);
+        return _pk;
+    }
+
+    /**
+     * Remove the primary key from this table.
+     *
+     * @return true if there was a pk to remove, false otherwise
+     */
+    public boolean removePrimaryKey() {
+        boolean rem = _pk != null;
+        if (rem) {
+            Schema schema = getSchema();
+            if (schema != null && schema.getSchemaGroup() != null)
+                schema.getSchemaGroup().removeName(_pk.getName());
+            _pk.remove();
+        }
+        _pk = null;
+        return rem;
+    }
+
+    /**
+     * Import a primary key; column names must match columns of this table.
+     */
+    public PrimaryKey importPrimaryKey(PrimaryKey pk) {
+        if (pk == null)
+            return null;
+
+        PrimaryKey copy = addPrimaryKey(pk.getName());
+        copy.setLogical(pk.isLogical());
+        Column[] cols = pk.getColumns();
+        for (int i = 0; i < cols.length; i++)
+            copy.addColumn(getColumn(cols[i].getName()));
+        return copy;
+    }
+
+    /**
+     * Return the foreign key with the given name. If multiple foreign keys
+     * have the name, the first match is returned.
+     */
+    public ForeignKey getForeignKey(String name) {
+        ForeignKey[] fks = getForeignKeys();
+        for (int i = 0; i < fks.length; i++)
+            if (StringUtils.equalsIgnoreCase(name, fks[i].getName()))
+                return fks[i];
+        return null;
+    }
+
+    /**
+     * Return all foreign keys for the table.
+     */
+    public ForeignKey[] getForeignKeys() {
+        if (_fks == null) {
+            if (_fkList == null)
+                _fks = Schemas.EMPTY_FOREIGN_KEYS;
+            else {
+                ForeignKey[] fks = new ForeignKey[_fkList.size()];
+                Iterator itr = _fkList.iterator();
+                for (int i = 0; itr.hasNext(); i++) {
+                    fks[i] = (ForeignKey) itr.next();
+                    fks[i].setIndex(i);
+                }
+                _fks = fks;
+            }
+        }
+        return _fks;
+    }
+
+    /**
+     * Add a foreign key to the table.
+     */
+    public ForeignKey addForeignKey() {
+        return addForeignKey(null);
+    }
+
+    /**
+     * Add a foreign key to the table. Duplicate key names are not allowed.
+     */
+    public ForeignKey addForeignKey(String name) {
+        Schema schema = getSchema();
+        ForeignKey fk;
+        if (schema != null && schema.getSchemaGroup() != null) {
+            schema.getSchemaGroup().addName(name, false);
+            fk = schema.getSchemaGroup().newForeignKey(name, this);
+        } else
+            fk = new ForeignKey(name, this);
+        if (_fkList == null)
+            _fkList = new ArrayList(3);
+        _fkList.add(fk);
+        _fks = null;
+        return fk;
+    }
+
+    /**
+     * Remove the given foreign key from the table.
+     *
+     * @return true if the key was removed, false if not in the table
+     */
+    public boolean removeForeignKey(ForeignKey fk) {
+        if (fk == null || _fkList == null)
+            return false;
+
+        if (!_fkList.remove(fk))
+            return false;
+
+        Schema schema = getSchema();
+        if (schema != null && schema.getSchemaGroup() != null)
+            schema.getSchemaGroup().removeName(fk.getName());
+        _fks = null;
+        fk.remove();
+        return true;
+    }
+
+    /**
+     * Import a foreign key; column names must match columns of this table.
+     * Joins can only be reconstructed when this table belongs to a schema
+     * group, since the referenced table must be looked up in the group.
+     */
+    public ForeignKey importForeignKey(ForeignKey fk) {
+        if (fk == null)
+            return null;
+
+        ForeignKey copy = addForeignKey(fk.getName());
+        copy.setDeleteAction(fk.getDeleteAction());
+
+        Schema schema = getSchema();
+        if (schema != null && schema.getSchemaGroup() != null) {
+            Column[] pks = fk.getPrimaryKeyColumns();
+            Table joined = null;
+            if (pks.length > 0)
+                joined = schema.getSchemaGroup().findTable(pks[0].getTable());
+
+            // NOTE(review): if findTable returns null the joins below
+            // will NPE; assumes the referenced table is in the group
+            Column[] cols = fk.getColumns();
+            for (int i = 0; i < cols.length; i++)
+                copy.join(getColumn(cols[i].getName()),
+                    joined.getColumn(pks[i].getName()));
+
+            cols = fk.getConstantColumns();
+            for (int i = 0; i < cols.length; i++)
+                copy.joinConstant(getColumn(cols[i].getName()),
+                    fk.getPrimaryKeyConstant(cols[i]));
+
+            pks = fk.getConstantPrimaryKeyColumns();
+            if (joined == null && pks.length > 0)
+                joined = schema.getSchemaGroup().findTable(pks[0].getTable());
+            for (int i = 0; i < pks.length; i++)
+                copy.joinConstant(fk.getConstant(pks[i]),
+                    joined.getColumn(pks[i].getName()));
+        }
+        return copy;
+    }
+
+    /**
+     * Return the table's indexes.
+     */
+    public Index[] getIndexes() {
+        if (_idxs == null)
+            _idxs = (_idxMap == null) ? Schemas.EMPTY_INDEXES : (Index[])
+                _idxMap.values().toArray(new Index[_idxMap.size()]);
+        return _idxs;
+    }
+
+    /**
+     * Return the index with the given name, or null if none.
+     */
+    public Index getIndex(String name) {
+        if (name == null || _idxMap == null)
+            return null;
+        return (Index) _idxMap.get(name.toUpperCase());
+    }
+
+    /**
+     * Add an index to the table.
+     */
+    public Index addIndex(String name) {
+        Schema schema = getSchema();
+        Index idx;
+        if (schema != null && schema.getSchemaGroup() != null) {
+            // index names must be unique across the whole group
+            schema.getSchemaGroup().addName(name, true);
+            idx = schema.getSchemaGroup().newIndex(name, this);
+        } else
+            idx = new Index(name, this);
+        if (_idxMap == null)
+            _idxMap = new TreeMap();
+        _idxMap.put(name.toUpperCase(), idx);
+        _idxs = null;
+        return idx;
+    }
+
+    /**
+     * Remove the given index from the table.
+     *
+     * @return true if the index was removed, false if not in the table
+     */
+    public boolean removeIndex(Index idx) {
+        if (idx == null || _idxMap == null)
+            return false;
+
+        Index cur = (Index) _idxMap.get(idx.getName().toUpperCase());
+        if (!idx.equals(cur))
+            return false;
+
+        _idxMap.remove(idx.getName().toUpperCase());
+        Schema schema = getSchema();
+        if (schema != null && schema.getSchemaGroup() != null)
+            schema.getSchemaGroup().removeName(idx.getName());
+        idx.remove();
+        _idxs = null;
+        return true;
+    }
+
+    /**
+     * Import an index; column names must match columns of this table.
+     */
+    public Index importIndex(Index idx) {
+        if (idx == null)
+            return null;
+
+        Index copy = addIndex(idx.getName());
+        copy.setUnique(idx.isUnique());
+
+        Column[] cols = idx.getColumns();
+        for (int i = 0; i < cols.length; i++)
+            copy.addColumn(getColumn(cols[i].getName()));
+        return copy;
+    }
+
+    /**
+     * Return the table's unique constraints.
+     */
+    public Unique[] getUniques() {
+        if (_unqs == null)
+            _unqs = (_unqList == null) ? Schemas.EMPTY_UNIQUES : (Unique[])
+                _unqList.toArray(new Unique[_unqList.size()]);
+        return _unqs;
+    }
+
+    /**
+     * Return the unique constraint with the given name, or null if none.
+     */
+    public Unique getUnique(String name) {
+        Unique[] unqs = getUniques();
+        for (int i = 0; i < unqs.length; i++)
+            if (StringUtils.equalsIgnoreCase(name, unqs[i].getName()))
+                return unqs[i];
+        return null;
+    }
+
+    /**
+     * Add a unique constraint to the table.
+     */
+    public Unique addUnique(String name) {
+        Schema schema = getSchema();
+        Unique unq;
+        if (schema != null && schema.getSchemaGroup() != null) {
+            schema.getSchemaGroup().addName(name, false);
+            unq = schema.getSchemaGroup().newUnique(name, this);
+        } else
+            unq = new Unique(name, this);
+        if (_unqList == null)
+            _unqList = new ArrayList(3);
+        _unqList.add(unq);
+        _unqs = null;
+        return unq;
+    }
+
+    /**
+     * Remove the given unique constraint from the table.
+     *
+     * @return true if the constraint was removed, false if not in the table
+     */
+    public boolean removeUnique(Unique unq) {
+        if (unq == null || _unqList == null)
+            return false;
+
+        if (!_unqList.remove(unq))
+            return false;
+
+        Schema schema = getSchema();
+        if (schema != null && schema.getSchemaGroup() != null)
+            schema.getSchemaGroup().removeName(unq.getName());
+        _unqs = null;
+        unq.remove();
+        return true;
+    }
+
+    /**
+     * Import a constraint; column names must match columns of this table.
+     */
+    public Unique importUnique(Unique unq) {
+        if (unq == null)
+            return null;
+
+        Unique copy = addUnique(unq.getName());
+        copy.setDeferred(unq.isDeferred());
+
+        Column[] cols = unq.getColumns();
+        for (int i = 0; i < cols.length; i++)
+            copy.addColumn(getColumn(cols[i].getName()));
+        return copy;
+    }
+
+    /**
+     * Called by columns to ensure that all columns are properly indexed
+     * before returning that information to the user.
+     */
+    void indexColumns() {
+        getColumns();
+    }
+
+    /**
+     * Called by foreign keys to ensure that all fks are properly indexed
+     * before returning that information to the user.
+     */
+    void indexForeignKeys() {
+        getForeignKeys();
+    }
+
+    /**
+     * Called by columns when their auto increment status changes.
+     */
+    void changeAutoAssigned(Column col) {
+        _autoAssign = null;
+    }
+
+    /**
+     * Called by columns when their relation id status changes.
+     */
+    void changeRelationId(Column col) {
+        _rels = null;
+    }
+
+    /**
+     * Order tables by full name; tables with null names sort last.
+     */
+    public int compareTo(Object other) {
+        String name = getFullName();
+        String otherName = ((Table) other).getFullName();
+        if (name == null && otherName == null)
+            return 0;
+        if (name == null)
+            return 1;
+        if (otherName == null)
+            return -1;
+        return name.compareTo(otherName);
+    }
+
+    public String toString() {
+        return getFullName();
+    }
+}
Propchange: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/Table.java
------------------------------------------------------------------------------
svn:executable = *
Added: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/TableSchemaFactory.java
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/TableSchemaFactory.java?rev=423615&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/TableSchemaFactory.java (added)
+++ incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/TableSchemaFactory.java Wed Jul 19 14:34:44 2006
@@ -0,0 +1,493 @@
+/*
+ * Copyright 2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.openjpa.jdbc.schema;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.io.Writer;
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Types;
+
+import org.apache.openjpa.jdbc.conf.JDBCConfiguration;
+import org.apache.openjpa.jdbc.conf.JDBCConfigurationImpl;
+import org.apache.openjpa.jdbc.sql.DBDictionary;
+import org.apache.openjpa.jdbc.sql.SQLBuffer;
+import org.apache.openjpa.jdbc.sql.SQLExceptions;
+import org.apache.openjpa.lib.conf.Configurable;
+import org.apache.openjpa.lib.conf.Configuration;
+import org.apache.openjpa.lib.conf.Configurations;
+import org.apache.openjpa.lib.log.Log;
+import org.apache.openjpa.lib.util.Localizer;
+import org.apache.openjpa.lib.util.Options;
+import org.apache.openjpa.meta.JavaTypes;
+import org.apache.openjpa.util.GeneralException;
+import serp.util.Numbers;
+import serp.util.Strings;
+
+/**
+ * Factory that uses an XML schema definition stored in a database table
+ * to record the system schema.
+ *
+ * @author Abe White
+ */
+public class TableSchemaFactory
+    implements SchemaFactory, Configurable {
+
+    public static final String ACTION_ADD = "add";
+    public static final String ACTION_DROP = "drop";
+
+    private static final Localizer _loc = Localizer.forPackage
+        (TableSchemaFactory.class);
+    // true once any instance in this JVM has attempted to (re)create the
+    // schema table after a failed write; prevents repeated creation attempts
+    private static boolean _refreshedTable = false;
+
+    private JDBCConfiguration _conf = null;
+    private Log _log = null;
+    private String _table = "OPENJPA_SCHEMA";
+    private String _pkColumnName = "ID";
+    private String _schemaColumnName = "SCHEMA_DEF";
+    // object-level representations built in buildTable() at end of config
+    private Column _pkColumn = null;
+    private Column _schemaColumn = null;
+
+    /**
+     * The name of the schema definition table. Defaults to
+     * <code>OPENJPA_SCHEMA</code>.
+     */
+    public String getTable() {
+        return _table;
+    }
+
+    /**
+     * The name of the schema definition table. Defaults to
+     * <code>OPENJPA_SCHEMA</code>.
+     */
+    public void setTable(String name) {
+        _table = name;
+    }
+
+    /**
+     * @deprecated Use {@link #setTable}. Retained for
+     * backwards-compatible auto-configuration.
+     */
+    public void setTableName(String name) {
+        setTable(name);
+    }
+
+    /**
+     * The name of the primary key column on the schema definition table.
+     * Defaults to <code>ID</code>.
+     */
+    public void setPrimaryKeyColumn(String name) {
+        _pkColumnName = name;
+    }
+
+    /**
+     * The name of the primary key column on the schema definition table.
+     * Defaults to <code>ID</code>.
+     */
+    public String getPrimaryKeyColumn() {
+        return _pkColumnName;
+    }
+
+    /**
+     * The name of the schema column on the schema definition table.
+     * Defaults to <code>SCHEMA_DEF</code>.
+     */
+    public void setSchemaColumn(String name) {
+        _schemaColumnName = name;
+    }
+
+    /**
+     * The name of the schema column on the schema definition table.
+     * Defaults to <code>SCHEMA_DEF</code>.
+     */
+    public String getSchemaColumn() {
+        return _schemaColumnName;
+    }
+
+    /**
+     * Return the configuration this factory was configured with.
+     */
+    public JDBCConfiguration getConfiguration() {
+        return _conf;
+    }
+
+    /**
+     * Receive the system configuration; also caches the schema log.
+     */
+    public void setConfiguration(Configuration conf) {
+        _conf = (JDBCConfiguration) conf;
+        _log = _conf.getLog(JDBCConfiguration.LOG_SCHEMA);
+    }
+
+    public void startConfiguration() {
+    }
+
+    /**
+     * Called when configuration is complete; builds the object-level
+     * representation of the schema table from the configured names.
+     */
+    public void endConfiguration() {
+        buildTable();
+    }
+
+    /**
+     * Read the serialized schema from the database and parse it.
+     * Returns an empty group if the schema cannot be read.
+     */
+    public synchronized SchemaGroup readSchema() {
+        String schema = null;
+        try {
+            schema = readSchemaColumn();
+        } catch (SQLException se) {
+            // best-effort: a missing or unreadable table is treated as
+            // an empty schema, not an error
+            if (_log.isWarnEnabled())
+                _log.warn(_loc.get("bad-sch-read", se));
+        }
+        if (schema == null)
+            return new SchemaGroup();
+
+        XMLSchemaParser parser = new XMLSchemaParser(_conf);
+        try {
+            parser.parse(new StringReader(schema),
+                _schemaColumn.getFullName());
+        } catch (IOException ioe) {
+            throw new GeneralException(ioe);
+        }
+        return parser.getSchemaGroup();
+    }
+
+    /**
+     * Serialize the given schema group to XML and store it in the
+     * database, creating the schema table on demand if the first
+     * write fails.
+     */
+    public void storeSchema(SchemaGroup schema) {
+        XMLSchemaSerializer ser = new XMLSchemaSerializer(_conf);
+        ser.addAll(schema);
+        Writer writer = new StringWriter();
+        try {
+            ser.serialize(writer, ser.COMPACT);
+        } catch (IOException ioe) {
+            throw new GeneralException(ioe);
+        }
+
+        String schemaStr = writer.toString();
+        try {
+            writeSchemaColumn(schemaStr);
+        } catch (SQLException se) {
+            if (_log.isWarnEnabled())
+                _log.warn(_loc.get("bad-sch-write-1", se));
+
+            // maybe the update failed b/c the sequence table doesn't
+            // exist yet; create it now; note that though we synchronize
+            // here, other JVMs can still be trying to create the table at
+            // the same time (unlikely, since this is a dev-tool op)
+            synchronized (TableSchemaFactory.class) {
+                if (!_refreshedTable) {
+                    _refreshedTable = true;
+                    try {
+                        refreshTable();
+                    } catch (Exception e) {
+                        if (_log.isWarnEnabled())
+                            _log.warn(_loc.get("bad-sch-ref", e));
+                    }
+                }
+
+                try {
+                    writeSchemaColumn(schemaStr);
+                } catch (Exception e) {
+                    if (_log.isWarnEnabled())
+                        _log.warn(_loc.get("bad-sch-write-2"));
+
+                    // throw original exception
+                    throw SQLExceptions.getStore(se,
+                        _conf.getDBDictionaryInstance());
+                }
+            }
+        }
+    }
+
+    /**
+     * Creates the schema table in the DB.
+     */
+    public void refreshTable()
+        throws SQLException {
+        if (_log.isInfoEnabled())
+            _log.info(_loc.get("make-sch-table"));
+
+        // create the table
+        SchemaTool tool = new SchemaTool(_conf);
+        tool.setIgnoreErrors(true);
+        tool.createTable(_pkColumn.getTable());
+
+        // insert an empty schema
+        Connection conn = getConnection();
+        PreparedStatement stmnt = null;
+        boolean wasAuto = true;
+        try {
+            // force autocommit for the insert; restored in finally
+            wasAuto = conn.getAutoCommit();
+            if (!wasAuto)
+                conn.setAutoCommit(true);
+
+            DBDictionary dict = _conf.getDBDictionaryInstance();
+            stmnt = conn.prepareStatement("INSERT INTO "
+                + dict.getFullName(_pkColumn.getTable(), false)
+                + " (" + _pkColumn + ", " + _schemaColumn + ") VALUES (?, ?)");
+            dict.setInt(stmnt, 1, 1, _pkColumn);
+            dict.setNull(stmnt, 2, _schemaColumn.getType(), _schemaColumn);
+
+            stmnt.executeUpdate();
+        } finally {
+            // close quietly; cleanup failures must not mask the result
+            if (stmnt != null)
+                try {
+                    stmnt.close();
+                } catch (SQLException se) {
+                }
+            if (!wasAuto)
+                conn.setAutoCommit(false);
+            try {
+                conn.close();
+            } catch (SQLException se) {
+            }
+        }
+    }
+
+    /**
+     * Drops the sequence table in the DB.
+     */
+    public void dropTable()
+        throws SQLException {
+        if (_log.isInfoEnabled())
+            _log.info(_loc.get("drop-sch-table"));
+
+        // drop the table
+        SchemaTool tool = new SchemaTool(_conf);
+        tool.setIgnoreErrors(true);
+        tool.dropTable(_pkColumn.getTable());
+    }
+
+    /**
+     * Returns the schema as an XML string.
+     */
+    public String readSchemaColumn()
+        throws SQLException {
+        DBDictionary dict = _conf.getDBDictionaryInstance();
+        // SELECT <schema col> FROM <table> WHERE <pk col> = 1
+        SQLBuffer sel = new SQLBuffer(dict).append(_schemaColumn);
+        SQLBuffer where = new SQLBuffer(dict).append(_pkColumn).append(" = ").
+            appendValue(Numbers.valueOf(1), _pkColumn);
+        SQLBuffer tables = new SQLBuffer(dict).append(_pkColumn.getTable());
+
+        SQLBuffer select = dict.toSelect(sel, null, tables, where, null,
+            null, null, false, false, 0, Long.MAX_VALUE);
+
+        Connection conn = getConnection();
+        PreparedStatement stmnt = null;
+        ResultSet rs = null;
+        boolean wasAuto = true;
+        try {
+            wasAuto = conn.getAutoCommit();
+            if (!wasAuto)
+                conn.setAutoCommit(true);
+
+            stmnt = select.prepareStatement(conn);
+            rs = stmnt.executeQuery();
+            rs.next();
+            // clob columns need dictionary-specific extraction
+            String schema = (_schemaColumn.getType() == Types.CLOB) ?
+                dict.getClobString(rs, 1) : dict.getString(rs, 1);
+            return schema;
+        } finally {
+            if (rs != null)
+                try {
+                    rs.close();
+                } catch (SQLException se) {
+                }
+            if (stmnt != null)
+                try {
+                    stmnt.close();
+                } catch (SQLException se) {
+                }
+            if (!wasAuto)
+                conn.setAutoCommit(false);
+            try {
+                conn.close();
+            } catch (SQLException se) {
+            }
+        }
+    }
+
+    /**
+     * Writes the schema as a string to the database.
+     */
+    public void writeSchemaColumn(String schema)
+        throws SQLException {
+        // always use special clob handling when dict has max embedded size;
+        // for some reason optimizing for string length causes errors
+        DBDictionary dict = _conf.getDBDictionaryInstance();
+        boolean embedded = dict.maxEmbeddedClobSize == -1;
+        String update;
+        if (embedded)
+            update = "UPDATE " + dict.getFullName(_pkColumn.getTable(), false)
+                + " SET " + _schemaColumn + " = ? WHERE " + _pkColumn + " = ?";
+        else
+            // non-embedded clobs are written through an updatable result
+            // set rather than a direct UPDATE
+            update = "SELECT " + _schemaColumn + " FROM "
+                + dict.getFullName(_pkColumn.getTable(), false)
+                + " WHERE " + _pkColumn + " = ?";
+
+        Connection conn = getConnection();
+        PreparedStatement stmnt = null;
+        ResultSet rs = null;
+        boolean wasAuto = true;
+        try {
+            // if embedded we want autocommit true, else false
+            wasAuto = conn.getAutoCommit();
+            if (wasAuto != embedded)
+                conn.setAutoCommit(embedded);
+
+            if (embedded) {
+                stmnt = conn.prepareStatement(update);
+                if (schema == null)
+                    dict.setNull(stmnt, 1, _schemaColumn.getType(),
+                        _schemaColumn);
+                else if (_schemaColumn.getType() == Types.CLOB)
+                    dict.setClobString(stmnt, 1, schema, _schemaColumn);
+                else
+                    dict.setString(stmnt, 1, schema, _schemaColumn);
+                dict.setInt(stmnt, 2, 1, _pkColumn);
+                stmnt.executeUpdate();
+            } else {
+                stmnt = conn.prepareStatement(update,
+                    ResultSet.TYPE_SCROLL_INSENSITIVE,
+                    ResultSet.CONCUR_UPDATABLE);
+                dict.setInt(stmnt, 1, 1, _pkColumn);
+                rs = stmnt.executeQuery();
+                rs.next();
+                dict.putString(rs.getClob(1), schema);
+                // autocommit is off on this path; commit the clob write
+                conn.commit();
+            }
+        }
+        finally {
+            if (rs != null)
+                try {
+                    rs.close();
+                } catch (SQLException se) {
+                }
+            if (stmnt != null)
+                try {
+                    stmnt.close();
+                } catch (SQLException se) {
+                }
+            if (wasAuto != embedded)
+                conn.setAutoCommit(wasAuto);
+            try {
+                conn.close();
+            } catch (SQLException se) {
+            }
+        }
+    }
+
+    /**
+     * Creates the object-level representation of the sequence table.
+     */
+    private void buildTable() {
+        // the configured table name may be qualified ("schema.table");
+        // split it using dotted-name helpers
+        String tableName = Strings.getClassName(_table);
+        String schemaName = Strings.getPackageName(_table);
+        if (schemaName.length() == 0)
+            schemaName = Schemas.getNewTableSchema(_conf);
+
+        // build the table in one of the designated schemas
+        SchemaGroup group = new SchemaGroup();
+        Schema schema = group.addSchema(schemaName);
+
+        Table table = schema.addTable(tableName);
+        PrimaryKey pk = table.addPrimaryKey();
+
+        DBDictionary dict = _conf.getDBDictionaryInstance();
+        _pkColumn = table.addColumn(dict.getValidColumnName
+            (_pkColumnName, table));
+        _pkColumn.setType(dict.getPreferredType(Types.TINYINT));
+        _pkColumn.setJavaType(JavaTypes.INT);
+        pk.addColumn(_pkColumn);
+
+        _schemaColumn = table.addColumn(dict.getValidColumnName
+            (_schemaColumnName, table));
+        _schemaColumn.setType(dict.getPreferredType(Types.CLOB));
+        _schemaColumn.setJavaType(JavaTypes.STRING);
+    }
+
+    /**
+     * Return a connection to use.
+     */
+    private Connection getConnection()
+        throws SQLException {
+        return _conf.getDataSource2(null).getConnection();
+    }
+
+    /////////
+    // Main
+    /////////
+
+    /**
+     * Usage: java org.apache.openjpa.jdbc.schema.TableSchemaFactory
+     * [option]* -action/-a &lt;add | drop&gt;
+     * Where the following options are recognized.
+     * <ul>
+     * <li><i>-properties/-p &lt;properties file or resource&gt;</i>: The
+     * path or resource name of a OpenJPA properties file containing
+     * information such as the license key and connection data as
+     * outlined in {@link JDBCConfiguration}. Optional.</li>
+     * <li><i>-&lt;property name&gt; &lt;property value&gt;</i>: All bean
+     * properties of the OpenJPA {@link JDBCConfiguration} can be set by
+     * using their names and supplying a value. For example:
+     * <code>-licenseKey adslfja83r3lkadf</code></li>
+     * </ul>
+     * The various actions are as follows.
+     * <ul>
+     * <li><i>add</i>: Create the schema table.</li>
+     * <li><i>drop</i>: Drop the schema table.</li>
+     * </ul>
+     */
+    public static void main(String[] args)
+        throws IOException, SQLException {
+        Options opts = new Options();
+        args = opts.setFromCmdLine(args);
+        JDBCConfiguration conf = new JDBCConfigurationImpl();
+        try {
+            if (!run(conf, args, opts))
+                System.out.println(_loc.get("sch-usage"));
+        } finally {
+            conf.close();
+        }
+    }
+
+    /**
+     * Run the tool. Returns false if invalid options were given.
+     *
+     * @see #main
+     */
+    public static boolean run(JDBCConfiguration conf, String[] args,
+        Options opts)
+        throws IOException, SQLException {
+        if (opts.containsKey("help") || opts.containsKey("-help"))
+            return false;
+
+        String action = opts.removeProperty("action", "a", null);
+        Configurations.populateConfiguration(conf, opts);
+        return run(conf, action);
+    }
+
+    /**
+     * Run the tool.
+     */
+    public static boolean run(JDBCConfiguration conf, String action)
+        throws IOException, SQLException {
+        // run the action
+        TableSchemaFactory factory = new TableSchemaFactory();
+        String props = Configurations.getProperties(conf.getSchemaFactory());
+        Configurations.configureInstance(factory, conf, props);
+
+        if (ACTION_DROP.equals(action))
+            factory.dropTable();
+        else if (ACTION_ADD.equals(action))
+            factory.refreshTable();
+        else
+            return false;
+        return true;
+    }
+}
Propchange: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/TableSchemaFactory.java
------------------------------------------------------------------------------
svn:executable = *
Added: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/Unique.java
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/Unique.java?rev=423615&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/Unique.java (added)
+++ incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/Unique.java Wed Jul 19 14:34:44 2006
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.openjpa.jdbc.schema;
+
/**
 * Represents a unique constraint. It can also represent a partial constraint.
 *
 * @author Abe White
 */
public class Unique
    extends LocalConstraint {

    /**
     * Default constructor.
     */
    public Unique() {
    }

    /**
     * Constructor.
     *
     * @param name the name of the constraint, if any
     * @param table the table of the constraint
     */
    public Unique(String name, Table table) {
        super(name, table);
    }

    /**
     * Unique constraints are never logical-only; they always represent a
     * physical database constraint.
     */
    public boolean isLogical() {
        return false;
    }

    /**
     * Return true if the structure of this unique constraint matches that of
     * the given one (same table, same columns).
     */
    public boolean equalsUnique(Unique unq) {
        return equalsLocalConstraint(unq);
    }
}
Propchange: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/Unique.java
------------------------------------------------------------------------------
svn:executable = *
Added: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/XMLSchemaParser.java
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/XMLSchemaParser.java?rev=423615&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/XMLSchemaParser.java (added)
+++ incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/XMLSchemaParser.java Wed Jul 19 14:34:44 2006
@@ -0,0 +1,601 @@
+/*
+ * Copyright 2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.openjpa.jdbc.schema;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.LinkedList;
+
+import org.xml.sax.Attributes;
+import org.xml.sax.SAXException;
+import org.apache.openjpa.jdbc.conf.JDBCConfiguration;
+import org.apache.openjpa.jdbc.sql.DBDictionary;
+import org.apache.openjpa.lib.meta.XMLMetaDataParser;
+import org.apache.openjpa.lib.util.Localizer;
+import org.apache.openjpa.util.UserException;
+
/**
 * Custom SAX parser used to parse {@link Schema} objects. The parser
 * will place all parsed schemas into the current {@link SchemaGroup}, set
 * via the {@link #setSchemaGroup} method. This allows parsing of
 * multiple files into a single schema group.
 * The parser deserializes from the following XML format:<br />
 * <code> <!ELEMENT schemas (schema)+><br />
 * <!ELEMENT schema (table|sequence)+><br />
 * <!ATTLIST schema name CDATA #IMPLIED><br />
 * <!ELEMENT table (column|index|pk|fk|unique)+><br />
 * <!ATTLIST table name CDATA #REQUIRED><br />
 * <!ELEMENT column EMPTY><br />
 * <!ATTLIST column name CDATA #REQUIRED><br />
 * <!ATTLIST column type (array|bigint|binary|bit|blob|char|clob
 * |date|decimal|distinct|double|float|integer|java_object
 * |longvarbinary|longvarchar|null|numeric|other|real|ref|smallint|struct
 * |time|timstamp|tinyint|varbinary|varchar) #REQUIRED><br />
 * <!ATTLIST column type-name CDATA #IMPLIED><br />
 * <!ATTLIST column size CDATA #IMPLIED><br />
 * <!ATTLIST column decimal-digits CDATA #IMPLIED><br />
 * <!ATTLIST column not-null (true|false) "false"><br />
 * <!ATTLIST column default CDATA #IMPLIED><br />
 * <!ATTLIST column auto-assign (true|false) "false"><br />
 * <!ELEMENT index (on)*><br />
 * <!ATTLIST index name CDATA #REQUIRED><br />
 * <!ATTLIST index column CDATA #IMPLIED><br />
 * <!ATTLIST index unique (true|false) "false"><br />
 * <!ELEMENT on EMPTY><br />
 * <!ATTLIST on column CDATA #REQUIRED><br />
 * <!ELEMENT pk (on)*><br /> <!ATTLIST pk name CDATA #IMPLIED><br />
 * <!ATTLIST pk column CDATA #IMPLIED><br />
 * <!ELEMENT fk (join)*><br />
 * <!ATTLIST fk name CDATA #IMPLIED><br />
 * <!ATTLIST fk deferred (true|false) "false"><br />
 * <!ATTLIST fk column CDATA #IMPLIED><br />
 * <!ATTLIST fk to-table CDATA #REQUIRED><br />
 * <!ATTLIST fk delete-action (cascade|default|restrict|none|null)
 * "none"><br />
 * <!ATTLIST fk update-action (cascade|default|restrict|none|null)
 * "none"><br /> <!ELEMENT unique (on)*><br />
 * <!ATTLIST unique name CDATA #IMPLIED><br />
 * <!ATTLIST unique column CDATA #IMPLIED><br />
 * <!ATTLIST unique deferred (true|false) "false"><br />
 * <!ELEMENT join EMPTY><br />
 * <!ATTLIST join column CDATA #IMPLIED><br />
 * <!ATTLIST join value CDATA #IMPLIED><br />
 * <!ATTLIST join to-column CDATA #REQUIRED><br />
 * <!ELEMENT sequence EMPTY><br />
 * <!ATTLIST sequence name CDATA #REQUIRED><br />
 * <!ATTLIST sequence initial-value CDATA #IMPLIED><br />
 * <!ATTLIST sequence increment CDATA #IMPLIED><br />
 * <!ATTLIST sequence allocate CDATA #IMPLIED><br />
 * </code>
 * Schema parsers are not threadsafe.
 *
 * @author Abe White
 * @nojavadoc
 */
public class XMLSchemaParser
    extends XMLMetaDataParser
    implements SchemaParser {

    private static final Localizer _loc = Localizer.forPackage
        (XMLSchemaParser.class);

    // dictionary used to map parsed JDBC type names to preferred types
    private final DBDictionary _dict;

    // state for current parse: the schema element currently being built;
    // only one of _pk/_index/_unq/_fk is non-null at a time
    private SchemaGroup _group = null;
    private Schema _schema = null;
    private Table _table = null;
    private PrimaryKeyInfo _pk = null;
    private IndexInfo _index = null;
    private UniqueInfo _unq = null;
    private ForeignKeyInfo _fk = null;
    private boolean _delay = false; // delay constraint resolution?

    // used to collect info on schema elements before they're resolved;
    // constraints may reference tables/columns parsed later (possibly in
    // other files), so resolution happens after parsing completes
    private final Collection _pkInfos = new LinkedList();
    private final Collection _indexInfos = new LinkedList();
    private final Collection _unqInfos = new LinkedList();
    private final Collection _fkInfos = new LinkedList();

    /**
     * Constructor. Supply configuration.
     */
    public XMLSchemaParser(JDBCConfiguration conf) {
        _dict = conf.getDBDictionaryInstance();
        setLog(conf.getLog(JDBCConfiguration.LOG_SCHEMA));
        setParseText(false);
        setSuffix(".schema");
    }

    /**
     * Whether constraint resolution is deferred until
     * {@link #resolveConstraints} is invoked explicitly, rather than
     * happening automatically at the end of each parse.
     */
    public boolean getDelayConstraintResolve() {
        return _delay;
    }

    public void setDelayConstraintResolve(boolean delay) {
        _delay = delay;
    }

    /**
     * Resolve all collected constraint information against the parsed
     * schema group, then clear the collected info.
     */
    public void resolveConstraints() {
        resolvePrimaryKeys();
        resolveIndexes();
        resolveForeignKeys();
        resolveUniques();
        clearConstraintInfo();
    }

    /**
     * Clear constraint infos.
     */
    private void clearConstraintInfo() {
        _pkInfos.clear();
        _indexInfos.clear();
        _fkInfos.clear();
        _unqInfos.clear();
    }

    /**
     * Return the schema group to parse into, lazily creating one if none
     * has been set.
     */
    public SchemaGroup getSchemaGroup() {
        if (_group == null)
            _group = new SchemaGroup();
        return _group;
    }

    public void setSchemaGroup(SchemaGroup group) {
        _group = group;
    }

    /**
     * Parse the schema relating to the given class. The schemas will
     * be added to the current schema group.
     */
    protected void finish() {
        // now resolve pk, idx, fk info, unless resolution is delayed to
        // allow multiple files to be parsed into the group first
        super.finish();
        if (!_delay)
            resolveConstraints();
    }

    /**
     * Transforms the collected primary key information into actual
     * primary keys on the schema tables.
     */
    private void resolvePrimaryKeys() {
        PrimaryKeyInfo pkInfo;
        String colName;
        Column col;
        for (Iterator itr = _pkInfos.iterator(); itr.hasNext();) {
            pkInfo = (PrimaryKeyInfo) itr.next();
            for (Iterator cols = pkInfo.cols.iterator(); cols.hasNext();) {
                colName = (String) cols.next();
                col = pkInfo.pk.getTable().getColumn(colName);
                if (col == null)
                    throwUserException(_loc.get("pk-resolve", new Object[]
                        { colName, pkInfo.pk.getTable() }));
                pkInfo.pk.addColumn(col);
            }
        }
    }

    /**
     * Transforms the collected index information into actual
     * indexes on the schema tables.
     */
    private void resolveIndexes() {
        IndexInfo indexInfo;
        String colName;
        Column col;
        for (Iterator itr = _indexInfos.iterator(); itr.hasNext();) {
            indexInfo = (IndexInfo) itr.next();
            for (Iterator cols = indexInfo.cols.iterator(); cols.hasNext();) {
                colName = (String) cols.next();
                col = indexInfo.index.getTable().getColumn(colName);
                if (col == null)
                    throwUserException(_loc.get("index-resolve", new Object[]
                        { indexInfo.index, colName,
                            indexInfo.index.getTable() }));
                indexInfo.index.addColumn(col);
            }
        }
    }

    /**
     * Transforms the collected foreign key information into actual
     * foreign keys on the schema tables.
     */
    private void resolveForeignKeys() {
        ForeignKeyInfo fkInfo;
        Table toTable;
        Column col;
        String colName;
        Column pkCol;
        String pkColName;
        PrimaryKey pk;
        Iterator pks;
        Iterator cols;
        for (Iterator itr = _fkInfos.iterator(); itr.hasNext();) {
            fkInfo = (ForeignKeyInfo) itr.next();
            // the target table must exist and have a primary key
            toTable = _group.findTable(fkInfo.toTable);
            if (toTable == null || toTable.getPrimaryKey() == null)
                throwUserException(_loc.get("fk-totable", new Object[]
                    { fkInfo.fk, fkInfo.toTable, fkInfo.fk.getTable() }));

            // check if only one fk column listed using shortcut: a single
            // "column" attribute with no explicit joins maps to the first
            // primary key column of the target table
            pk = toTable.getPrimaryKey();
            if (fkInfo.cols.size() == 1 && fkInfo.pks.size() == 0)
                fkInfo.pks.add(pk.getColumns()[0].getName());

            // make joins: cols and pks are parallel lists of local and
            // target column names
            pks = fkInfo.pks.iterator();
            for (cols = fkInfo.cols.iterator(); cols.hasNext();) {
                colName = (String) cols.next();
                col = fkInfo.fk.getTable().getColumn(colName);
                if (col == null)
                    throwUserException(_loc.get("fk-nocol",
                        fkInfo.fk, colName, fkInfo.fk.getTable()));

                pkColName = (String) pks.next();
                pkCol = toTable.getColumn(pkColName);
                if (pkCol == null)
                    throwUserException(_loc.get("fk-nopkcol", new Object[]
                        { fkInfo.fk, pkColName, toTable,
                            fkInfo.fk.getTable() }));

                fkInfo.fk.join(col, pkCol);
            }

            // make constant joins: local column joined to a constant value
            cols = fkInfo.constCols.iterator();
            for (Iterator vals = fkInfo.consts.iterator(); vals.hasNext();) {
                colName = (String) cols.next();
                col = fkInfo.fk.getTable().getColumn(colName);
                if (col == null)
                    throwUserException(_loc.get("fk-nocol",
                        fkInfo.fk, colName, fkInfo.fk.getTable()));

                fkInfo.fk.joinConstant(col, vals.next());
            }

            // constant joined to a target primary key column
            pks = fkInfo.constColsPK.iterator();
            for (Iterator vals = fkInfo.constsPK.iterator(); vals.hasNext();) {
                pkColName = (String) pks.next();
                pkCol = toTable.getColumn(pkColName);
                if (pkCol == null)
                    throwUserException(_loc.get("fk-nopkcol", new Object[]
                        { fkInfo.fk, pkColName, toTable,
                            fkInfo.fk.getTable() }));

                fkInfo.fk.joinConstant(vals.next(), pkCol);
            }
        }
    }

    /**
     * Transforms the collected unique constraint information into actual
     * constraints on the schema tables.
     */
    private void resolveUniques() {
        UniqueInfo unqInfo;
        String colName;
        Column col;
        for (Iterator itr = _unqInfos.iterator(); itr.hasNext();) {
            unqInfo = (UniqueInfo) itr.next();
            for (Iterator cols = unqInfo.cols.iterator(); cols.hasNext();) {
                colName = (String) cols.next();
                col = unqInfo.unq.getTable().getColumn(colName);
                if (col == null)
                    throwUserException(_loc.get("unq-resolve", new Object[]
                        { unqInfo.unq, colName, unqInfo.unq.getTable() }));
                unqInfo.unq.addColumn(col);
            }
        }
    }

    /**
     * Reset per-document parse state. The schema group and, when
     * resolution is delayed, the collected constraint infos survive
     * across documents.
     */
    protected void reset() {
        _schema = null;
        _table = null;
        _pk = null;
        _index = null;
        _fk = null;
        _unq = null;
        if (!_delay)
            clearConstraintInfo();
    }

    /**
     * Return a reader over the DTD resource bundled with this class.
     */
    protected Reader getDocType()
        throws IOException {
        return new InputStreamReader(XMLSchemaParser.class
            .getResourceAsStream("schemas-doctype.rsrc"));
    }

    /**
     * SAX callback: dispatch on the first character of the element name
     * (element names in the DTD are unambiguous by first character,
     * except "schema"/"sequence" which are disambiguated below).
     */
    protected boolean startElement(String name, Attributes attrs)
        throws SAXException {
        switch (name.charAt(0)) {
            case 's':
                if ("schema".equals(name))
                    startSchema(attrs);
                else if ("sequence".equals(name))
                    startSequence(attrs);
                return true;
            case 't':
                startTable(attrs);
                return true;
            case 'c':
                startColumn(attrs);
                return true;
            case 'p':
                startPrimaryKey(attrs);
                return true;
            case 'i':
                startIndex(attrs);
                return true;
            case 'u':
                startUnique(attrs);
                return true;
            case 'f':
                startForeignKey(attrs);
                return true;
            case 'o':
                startOn(attrs);
                return true;
            case 'j':
                startJoin(attrs);
                return true;
            default:
                return false;
        }
    }

    /**
     * SAX callback: close out the element currently being built.
     */
    protected void endElement(String name) {
        switch (name.charAt(0)) {
            case 's':
                if ("schema".equals(name))
                    endSchema();
                break;
            case 't':
                endTable();
                break;
            case 'p':
                endPrimaryKey();
                break;
            case 'i':
                endIndex();
                break;
            case 'u':
                endUnique();
                break;
            case 'f':
                endForeignKey();
                break;
        }
    }

    /**
     * Begin a schema element, reusing an existing schema of the same
     * name in the group if present.
     */
    private void startSchema(Attributes attrs) {
        // creates group if not set
        SchemaGroup group = getSchemaGroup();

        String name = attrs.getValue("name");
        _schema = group.getSchema(name);
        if (_schema == null)
            _schema = group.addSchema(name);
    }

    private void endSchema() {
        _schema = null;
    }

    /**
     * Parse a sequence element into the current schema.
     */
    private void startSequence(Attributes attrs) {
        Sequence seq = _schema.addSequence(attrs.getValue("name"));
        seq.setSource(getSourceFile(), seq.SRC_XML);
        try {
            String val = attrs.getValue("initial-value");
            if (val != null)
                seq.setInitialValue(Integer.parseInt(val));
            val = attrs.getValue("increment");
            if (val != null)
                seq.setIncrement(Integer.parseInt(val));
            val = attrs.getValue("allocate");
            if (val != null)
                seq.setAllocate(Integer.parseInt(val));
        } catch (NumberFormatException nfe) {
            // rethrow malformed numeric attributes as a user error
            throwUserException(_loc.get("bad-seq-num", seq.getFullName()));
        }
    }

    /**
     * Begin a table element in the current schema.
     */
    private void startTable(Attributes attrs) {
        _table = _schema.addTable(attrs.getValue("name"));
        _table.setSource(getSourceFile(), _table.SRC_XML);
    }

    private void endTable() {
        _table = null;
    }

    /**
     * Parse a column element into the current table.
     */
    private void startColumn(Attributes attrs) {
        Column col = _table.addColumn(attrs.getValue("name"));
        // translate the declared JDBC type into the dictionary's
        // preferred equivalent for the target database
        col.setType(_dict.getPreferredType(Schemas.getJDBCType
            (attrs.getValue("type"))));
        col.setTypeName(attrs.getValue("type-name"));
        String val = attrs.getValue("size");
        if (val != null)
            col.setSize(Integer.parseInt(val));
        val = attrs.getValue("decimal-digits");
        if (val != null)
            col.setDecimalDigits(Integer.parseInt(val));
        col.setNotNull("true".equals(attrs.getValue("not-null")));
        col.setAutoAssigned("true".equals(attrs.getValue("auto-assign"))
            || "true".equals(attrs.getValue("auto-increment"))); // old attr
        col.setDefaultString(attrs.getValue("default"));
    }

    /**
     * Begin a pk element; columns are recorded for later resolution.
     */
    private void startPrimaryKey(Attributes attrs) {
        _pk = new PrimaryKeyInfo();
        _pk.pk = _table.addPrimaryKey(attrs.getValue("name"));
        _pk.pk.setLogical("true".equals(attrs.getValue("logical")));

        // single-column shortcut attribute
        String val = attrs.getValue("column");
        if (val != null)
            _pk.cols.add(val);
    }

    private void endPrimaryKey() {
        _pkInfos.add(_pk);
        _pk = null;
    }

    /**
     * Begin an index element; columns are recorded for later resolution.
     */
    private void startIndex(Attributes attrs) {
        _index = new IndexInfo();
        _index.index = _table.addIndex(attrs.getValue("name"));
        _index.index.setUnique("true".equals(attrs.getValue("unique")));

        // single-column shortcut attribute
        String val = attrs.getValue("column");
        if (val != null)
            _index.cols.add(val);
    }

    private void endIndex() {
        _indexInfos.add(_index);
        _index = null;
    }

    /**
     * Begin a unique element; columns are recorded for later resolution.
     */
    private void startUnique(Attributes attrs) {
        _unq = new UniqueInfo();
        _unq.unq = _table.addUnique(attrs.getValue("name"));
        _unq.unq.setDeferred("true".equals(attrs.getValue("deferred")));

        // single-column shortcut attribute
        String val = attrs.getValue("column");
        if (val != null)
            _unq.cols.add(val);
    }

    private void endUnique() {
        _unqInfos.add(_unq);
        _unq = null;
    }

    /**
     * Begin an fk element; joins are recorded for later resolution.
     */
    private void startForeignKey(Attributes attrs) {
        _fk = new ForeignKeyInfo();
        _fk.fk = _table.addForeignKey(attrs.getValue("name"));

        if ("true".equals(attrs.getValue("deferred")))
            _fk.fk.setDeferred(true);

        // set update action before delete action in case user incorrectly
        // sets update-action to "none" when there is a delete-action; otherwise
        // setting the update-action to "none" will also automatically set the
        // delete-action to "none", since FKs cannot have one action be none and
        // the other be non-none
        String action = attrs.getValue("update-action");
        if (action != null)
            _fk.fk.setUpdateAction(ForeignKey.getAction(action));
        action = attrs.getValue("delete-action");
        if (action != null)
            _fk.fk.setDeleteAction(ForeignKey.getAction(action));

        _fk.toTable = attrs.getValue("to-table");
        String val = attrs.getValue("column");
        if (val != null)
            _fk.cols.add(val);
    }

    private void endForeignKey() {
        _fkInfos.add(_fk);
        _fk = null;
    }

    /**
     * Parse an "on" element, adding its column to whichever of
     * pk/index/unique is currently being built.
     */
    private void startOn(Attributes attrs) {
        String col = attrs.getValue("column");
        if (_pk != null)
            _pk.cols.add(col);
        else if (_index != null)
            _index.cols.add(col);
        else
            _unq.cols.add(col);
    }

    /**
     * Parse a "join" element of the current foreign key. A join either
     * links a local column to a target column, or links a constant value
     * to a column on one side.
     */
    private void startJoin(Attributes attrs) {
        String col = attrs.getValue("column");
        String toCol = attrs.getValue("to-column");
        String val = attrs.getValue("value");
        if (val == null) {
            // plain column-to-column join
            _fk.cols.add(col);
            _fk.pks.add(toCol);
        } else if (col == null) {
            // constant joined to target pk column
            _fk.constsPK.add(convertConstant(val));
            _fk.constColsPK.add(toCol);
        } else {
            // local column joined to constant
            _fk.consts.add(convertConstant(val));
            _fk.constCols.add(col);
        }
    }

    /**
     * Convert a constant attribute value to its Java representation:
     * null, a quoted string, a Long, or a Double.
     */
    private static Object convertConstant(String val) {
        if ("null".equals(val))
            return null;
        if (val.startsWith("'"))
            return val.substring(1, val.length() - 1);
        if (val.indexOf('.') == -1)
            return new Long(val);
        return new Double(val);
    }

    /**
     * Throw a {@link UserException} prefixed with the current source name.
     */
    private void throwUserException(String msg) {
        throw new UserException(getSourceName() + ": " + msg);
    }

    /**
     * Used to hold primary key info before it is resolved.
     */
    private static class PrimaryKeyInfo {

        public PrimaryKey pk = null;
        public Collection cols = new LinkedList(); // column names
    }

    /**
     * Used to hold index info before it is resolved.
     */
    private static class IndexInfo {

        public Index index = null;
        public Collection cols = new LinkedList(); // column names
    }

    /**
     * Used to hold unique constraint info before it is resolved.
     */
    private static class UniqueInfo {

        public Unique unq = null;
        public Collection cols = new LinkedList(); // column names
    }

    /**
     * Used to hold foreign key info before it is resolved.
     */
    private static class ForeignKeyInfo {

        public ForeignKey fk = null;
        public String toTable = null;
        public Collection cols = new LinkedList(); // local column names
        public Collection pks = new LinkedList(); // target column names
        public Collection consts = new LinkedList(); // constant values
        public Collection constCols = new LinkedList(); // local cols
        public Collection constsPK = new LinkedList(); // constant values
        public Collection constColsPK = new LinkedList(); // target cols
    }
}
Propchange: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/XMLSchemaParser.java
------------------------------------------------------------------------------
svn:executable = *
Added: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/XMLSchemaSerializer.java
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/XMLSchemaSerializer.java?rev=423615&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/XMLSchemaSerializer.java (added)
+++ incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/XMLSchemaSerializer.java Wed Jul 19 14:34:44 2006
@@ -0,0 +1,444 @@
+/*
+ * Copyright 2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.openjpa.jdbc.schema;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeSet;
+
+import org.xml.sax.SAXException;
+import org.apache.openjpa.jdbc.conf.JDBCConfiguration;
+import org.apache.openjpa.lib.meta.XMLMetaDataSerializer;
+import org.apache.openjpa.lib.util.Localizer;
+
/**
 * Serializes {@link Schema}s to XML matching the document
 * type definition defined by the {@link XMLSchemaParser}. The serializer
 * actually works at the fine-grained table level to allow you to split
 * schemas among multiple files.
 * Serializers are not thread safe.
 *
 * @author Abe White
 * @nojavadoc
 */
public class XMLSchemaSerializer
    extends XMLMetaDataSerializer
    implements SchemaSerializer {

    private static final Localizer _loc = Localizer.forPackage
        (XMLSchemaSerializer.class);

    private final JDBCConfiguration _conf;
    // tables and sequences queued for serialization; TreeSet keeps the
    // output ordering deterministic
    private final Collection _tables = new TreeSet();
    private final Collection _seqs = new TreeSet();

    /**
     * Constructor. Supply configuration.
     */
    public XMLSchemaSerializer(JDBCConfiguration conf) {
        setLog(conf.getLog(JDBCConfiguration.LOG_SCHEMA));
        _conf = conf;
    }

    public Table[] getTables() {
        return (Table[]) _tables.toArray(new Table[_tables.size()]);
    }

    public void addTable(Table table) {
        if (table != null)
            _tables.add(table);
    }

    public boolean removeTable(Table table) {
        return _tables.remove(table);
    }

    public Sequence[] getSequences() {
        return (Sequence[]) _seqs.toArray(new Sequence[_seqs.size()]);
    }

    public void addSequence(Sequence seq) {
        if (seq != null)
            _seqs.add(seq);
    }

    public boolean removeSequence(Sequence seq) {
        return _seqs.remove(seq);
    }

    /**
     * Queue all tables and sequences of the given schema.
     */
    public void addAll(Schema schema) {
        if (schema == null)
            return;
        Table[] tables = schema.getTables();
        for (int i = 0; i < tables.length; i++)
            addTable(tables[i]);
        Sequence[] seqs = schema.getSequences();
        for (int i = 0; i < seqs.length; i++)
            addSequence(seqs[i]);
    }

    /**
     * Queue all schemas of the given group.
     */
    public void addAll(SchemaGroup group) {
        if (group == null)
            return;
        Schema[] schemas = group.getSchemas();
        for (int i = 0; i < schemas.length; i++)
            addAll(schemas[i]);
    }

    /**
     * Remove all tables and sequences of the given schema; returns
     * whether anything was actually removed.
     */
    public boolean removeAll(Schema schema) {
        if (schema == null)
            return false;

        boolean removed = false;
        Table[] tables = schema.getTables();
        for (int i = 0; i < tables.length; i++)
            removed |= removeTable(tables[i]);
        Sequence[] seqs = schema.getSequences();
        for (int i = 0; i < seqs.length; i++)
            removed |= removeSequence(seqs[i]);
        return removed;
    }

    /**
     * Remove all schemas of the given group; returns whether anything
     * was actually removed.
     */
    public boolean removeAll(SchemaGroup group) {
        if (group == null)
            return false;

        boolean removed = false;
        Schema[] schemas = group.getSchemas();
        for (int i = 0; i < schemas.length; i++)
            removed |= removeAll(schemas[i]);
        return removed;
    }

    public void clear() {
        _tables.clear();
        _seqs.clear();
    }

    /**
     * Return all queued objects (sequences first, then tables), avoiding
     * a copy when one of the two collections is empty.
     */
    protected Collection getObjects() {
        if (_seqs.isEmpty())
            return _tables;
        if (_tables.isEmpty())
            return _seqs;
        List all = new ArrayList(_seqs.size() + _tables.size());
        all.addAll(_seqs);
        all.addAll(_tables);
        return all;
    }

    /**
     * Serialize the queued objects, grouped under one schema element per
     * schema name.
     */
    protected void serialize(Collection objs)
        throws SAXException {
        // group the objects by schema
        Map schemas = new HashMap();
        String schemaName;
        Collection schemaObjs;
        Object obj;
        for (Iterator itr = objs.iterator(); itr.hasNext();) {
            obj = itr.next();
            if (obj instanceof Table)
                schemaName = ((Table) obj).getSchemaName();
            else
                schemaName = ((Sequence) obj).getSchemaName();
            schemaObjs = (Collection) schemas.get(schemaName);
            if (schemaObjs == null) {
                schemaObjs = new LinkedList();
                schemas.put(schemaName, schemaObjs);
            }
            schemaObjs.add(obj);
        }

        startElement("schemas");
        Map.Entry entry;
        for (Iterator itr = schemas.entrySet().iterator(); itr.hasNext();) {
            entry = (Map.Entry) itr.next();
            serializeSchema((String) entry.getKey(), (Collection)
                entry.getValue());
        }
        endElement("schemas");
    }

    /**
     * Serializes the given objects together into the current schema.
     */
    private void serializeSchema(String name, Collection objs)
        throws SAXException {
        if (objs.isEmpty())
            return;

        if (getLog().isTraceEnabled())
            getLog().trace(_loc.get("ser-schema", name));

        // the name attribute is optional in the DTD
        if (name != null)
            addAttribute("name", name);
        startElement("schema");

        // tables and seqs
        Object obj;
        for (Iterator itr = objs.iterator(); itr.hasNext();) {
            obj = itr.next();
            if (obj instanceof Table)
                serializeTable((Table) obj);
            else
                serializeSequence((Sequence) obj);
        }

        endElement("schema");
    }

    /**
     * Serialize the given sequence. Attributes matching the DTD defaults
     * are omitted.
     */
    private void serializeSequence(Sequence seq)
        throws SAXException {
        addAttribute("name", seq.getName());
        if (seq.getInitialValue() != 1)
            addAttribute("initial-value",
                String.valueOf(seq.getInitialValue()));
        if (seq.getIncrement() > 1)
            addAttribute("increment", String.valueOf(seq.getIncrement()));
        if (seq.getAllocate() > 1)
            addAttribute("allocate", String.valueOf(seq.getAllocate()));
        startElement("sequence");
        endElement("sequence");
    }

    /**
     * Serializes the given table.
     */
    private void serializeTable(Table table)
        throws SAXException {
        addAttribute("name", table.getName());
        startElement("table");

        // primary key
        PrimaryKey pk = table.getPrimaryKey();
        if (pk != null)
            serializePrimaryKey(pk);

        // columns
        Column[] cols = table.getColumns();
        for (int i = 0; i < cols.length; i++)
            serializeColumn(cols[i]);

        // foreign keys
        ForeignKey[] fks = table.getForeignKeys();
        for (int i = 0; i < fks.length; i++)
            serializeForeignKey(fks[i]);

        // indexes
        Index[] idxs = table.getIndexes();
        for (int i = 0; i < idxs.length; i++)
            serializeIndex(idxs[i]);

        // unique constraints
        Unique[] unqs = table.getUniques();
        for (int i = 0; i < unqs.length; i++)
            serializeUnique(unqs[i]);

        endElement("table");
    }

    /**
     * Serializes the given column. Attributes matching DTD defaults or
     * redundant with the JDBC type name are omitted.
     */
    private void serializeColumn(Column col)
        throws SAXException {
        addAttribute("name", col.getName());
        addAttribute("type", Schemas.getJDBCName(col.getType()));
        if (col.getTypeName() != null && col.getTypeName().length() > 0
            && !col.getTypeName().equalsIgnoreCase
            (Schemas.getJDBCName(col.getType())))
            addAttribute("type-name", col.getTypeName());
        if (col.isNotNull())
            addAttribute("not-null", "true");
        if (col.isAutoAssigned())
            addAttribute("auto-assign", "true");
        if (col.getDefaultString() != null)
            addAttribute("default", col.getDefaultString());
        if (col.getSize() != 0)
            addAttribute("size", String.valueOf(col.getSize()));
        if (col.getDecimalDigits() != 0)
            addAttribute("decimal-digits", String.valueOf
                (col.getDecimalDigits()));
        startElement("column");
        endElement("column");
    }

    /**
     * Serializes the given primary key. A single column is written as a
     * "column" attribute; multiple columns become nested "on" elements.
     */
    private void serializePrimaryKey(PrimaryKey pk)
        throws SAXException {
        if (pk.getName() != null)
            addAttribute("name", pk.getName());
        if (pk.isLogical())
            addAttribute("logical", "true");

        Column[] cols = pk.getColumns();
        if (cols.length == 1)
            addAttribute("column", cols[0].getName());
        startElement("pk");

        // columns
        if (cols.length > 1)
            for (int i = 0; i < cols.length; i++)
                serializeOn(cols[i]);

        endElement("pk");
    }

    /**
     * Serializes the given index. A single column is written as a
     * "column" attribute; multiple columns become nested "on" elements.
     */
    private void serializeIndex(Index idx)
        throws SAXException {
        addAttribute("name", idx.getName());
        if (idx.isUnique())
            addAttribute("unique", "true");
        Column[] cols = idx.getColumns();
        if (cols.length == 1)
            addAttribute("column", cols[0].getName());
        startElement("index");

        // columns
        if (cols.length > 1)
            for (int i = 0; i < cols.length; i++)
                serializeOn(cols[i]);

        endElement("index");
    }

    /**
     * Serializes the given constraint. A single column is written as a
     * "column" attribute; multiple columns become nested "on" elements.
     */
    private void serializeUnique(Unique unq)
        throws SAXException {
        if (unq.getName() != null)
            addAttribute("name", unq.getName());
        if (unq.isDeferred())
            addAttribute("deferred", "true");
        Column[] cols = unq.getColumns();
        if (cols.length == 1)
            addAttribute("column", cols[0].getName());
        startElement("unique");

        // columns
        if (cols.length > 1)
            for (int i = 0; i < cols.length; i++)
                serializeOn(cols[i]);

        endElement("unique");
    }

    /**
     * Serializes the given foreign key, including its plain and
     * constant joins.
     */
    private void serializeForeignKey(ForeignKey fk)
        throws SAXException {
        if (fk.getName() != null)
            addAttribute("name", fk.getName());

        if (fk.isDeferred())
            addAttribute("deferred", "true");

        // actions matching the parser defaults are omitted
        if (fk.getDeleteAction() != ForeignKey.ACTION_NONE)
            addAttribute("delete-action", ForeignKey.getActionName
                (fk.getDeleteAction()));
        if (fk.getUpdateAction() != ForeignKey.ACTION_NONE
            && fk.getUpdateAction() != ForeignKey.ACTION_RESTRICT)
            addAttribute("update-action", ForeignKey.getActionName
                (fk.getUpdateAction()));

        Column[] cols = fk.getColumns();
        Column[] pks = fk.getPrimaryKeyColumns();
        Column[] consts = fk.getConstantColumns();
        Column[] constsPK = fk.getConstantPrimaryKeyColumns();
        addAttribute("to-table", fk.getPrimaryKeyTable().getFullName());
        // single plain join with no constants uses the shortcut attribute
        if (cols.length == 1 && consts.length == 0 && constsPK.length == 0)
            addAttribute("column", cols[0].getName());
        startElement("fk");

        // columns
        if (cols.length > 1 || consts.length > 0 || constsPK.length > 0)
            for (int i = 0; i < cols.length; i++)
                serializeJoin(cols[i], pks[i]);
        for (int i = 0; i < consts.length; i++)
            serializeJoin(consts[i], fk.getConstant(consts[i]));
        for (int i = 0; i < constsPK.length; i++)
            serializeJoin(fk.getPrimaryKeyConstant(constsPK[i]), constsPK[i]);

        endElement("fk");
    }

    /**
     * Serializes the given column to an 'on' element.
     */
    private void serializeOn(Column col)
        throws SAXException {
        addAttribute("column", col.getName());
        startElement("on");
        endElement("on");
    }

    /**
     * Serializes the given columns to a 'join' element.
     */
    private void serializeJoin(Column col, Column pk)
        throws SAXException {
        addAttribute("column", col.getName());
        addAttribute("to-column", pk.getName());
        startElement("join");
        endElement("join");
    }

    /**
     * Serializes the given constant-to-pk-column pair to a 'join' element.
     */
    private void serializeJoin(Object val, Column pk)
        throws SAXException {
        addAttribute("value", stringifyConstant(val));
        addAttribute("to-column", pk.getName());
        startElement("join");
        endElement("join");
    }

    /**
     * Serializes the given column-to-constant pair to a 'join' element.
     */
    private void serializeJoin(Column col, Object val)
        throws SAXException {
        addAttribute("column", col.getName());
        addAttribute("value", stringifyConstant(val));
        startElement("join");
        endElement("join");
    }

    /**
     * Stringify the given constant value in the form the parser's
     * convertConstant expects: "null", a quoted string, or a number.
     */
    private static String stringifyConstant(Object val) {
        if (val == null)
            return "null";
        if (val instanceof String)
            return "'" + val + "'";
        return val.toString();
    }
}
Propchange: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/XMLSchemaSerializer.java
------------------------------------------------------------------------------
svn:executable = *
Added: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/package.html
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/package.html?rev=423615&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/package.html (added)
+++ incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/package.html Wed Jul 19 14:34:44 2006
@@ -0,0 +1,16 @@
+<html>
+<body>
+<p><strong>OpenJPA-JDBC Schema Management</strong></p>
+
+<p>
+ This package provides utilities for managing the schema of
+ persistent objects stored in a relational database.
+</p>
+
+<p>
+ Schema components themselves are little more than standard Java beans.
+ Helper classes are responsible for additional functionality, such as
+ XML serialization and deserialization.
+</p>
+</body>
+</html>
Propchange: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/schema/package.html
------------------------------------------------------------------------------
svn:executable = *
Added: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/sql/AbstractDB2Dictionary.java
URL: http://svn.apache.org/viewvc/incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/sql/AbstractDB2Dictionary.java?rev=423615&view=auto
==============================================================================
--- incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/sql/AbstractDB2Dictionary.java (added)
+++ incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/sql/AbstractDB2Dictionary.java Wed Jul 19 14:34:44 2006
@@ -0,0 +1,95 @@
+/*
+ * Copyright 2006 The Apache Software Foundation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.openjpa.jdbc.sql;
+
+import org.apache.openjpa.jdbc.kernel.exps.FilterValue;
+
+/**
+ * Base dictionary for the IBM DB2 family of databases.
+ *
+ * Configures DB2-flavored type names, string functions, pessimistic
+ * locking restrictions, identity-column support, and the DB2 forms of
+ * the query indexOf/substring functions.
+ */
+public abstract class AbstractDB2Dictionary
+ extends DBDictionary {
+
+ public AbstractDB2Dictionary() {
+ // map these JDBC types onto DB2-friendly equivalents
+ numericTypeName = "DOUBLE";
+ bitTypeName = "SMALLINT";
+ smallintTypeName = "SMALLINT";
+ tinyintTypeName = "SMALLINT";
+
+ // the CASTs give otherwise-untyped arguments (e.g. parameter
+ // markers) an explicit type inside these functions -- presumably
+ // required by DB2; TODO confirm against the DB2 SQL reference
+ toUpperCaseFunction = "UPPER(CAST({0} AS VARCHAR(1000)))";
+ toLowerCaseFunction = "LOWER(CAST({0} AS VARCHAR(1000)))";
+ stringLengthFunction = "LENGTH(CAST({0} AS VARCHAR(1000)))";
+
+ // trim-both is composed from the one-sided LTRIM/RTRIM functions
+ trimLeadingFunction = "LTRIM({0})";
+ trimTrailingFunction = "RTRIM({0})";
+ trimBothFunction = "LTRIM(RTRIM({0}))";
+
+ // in DB2, "for update" seems to be ignored with isolation
+ // levels below REPEATABLE_READ... force isolation to behave like RR
+ forUpdateClause = "FOR UPDATE WITH RR";
+
+ // SELECT shapes under which DB2 cannot apply pessimistic locks
+ supportsLockingWithDistinctClause = false;
+ supportsLockingWithMultipleTables = false;
+ supportsLockingWithOrderClause = false;
+ supportsLockingWithOuterJoin = false;
+ supportsLockingWithInnerJoin = false;
+ supportsLockingWithSelectRange = false;
+
+ requiresAutoCommitForMetaData = true;
+ requiresAliasForSubselect = true;
+
+ // identity columns: the database generates key values, and the
+ // last generated value is read back via IDENTITY_VAL_LOCAL()
+ supportsAutoAssign = true;
+ autoAssignClause = "GENERATED BY DEFAULT AS IDENTITY";
+ lastGeneratedKeyQuery = "VALUES(IDENTITY_VAL_LOCAL())";
+
+ // DB2 doesn't understand "X CROSS JOIN Y", but it does understand
+ // the equivalent "X JOIN Y ON 1 = 1"
+ crossJoinClause = "JOIN";
+ requiresConditionForCrossJoin = true;
+ }
+
+ /**
+ * Emit DB2's LOCATE function for the query indexOf expression.
+ * The start offset and the result are adjusted by +1/-1 to translate
+ * between the 0-based indexOf contract and DB2's 1-based LOCATE.
+ */
+ public void indexOf(SQLBuffer buf, FilterValue str, FilterValue find,
+ FilterValue start) {
+ // CASTs supply explicit types for otherwise-untyped expressions
+ // such as parameter markers
+ buf.append("(LOCATE(CAST((");
+ find.appendTo(buf);
+ buf.append(") AS VARCHAR(1000)), CAST((");
+ str.appendTo(buf);
+ buf.append(") AS VARCHAR(1000))");
+ if (start != null) {
+ // convert the 0-based start offset to a 1-based position
+ buf.append(", CAST((");
+ start.appendTo(buf);
+ buf.append(") AS INTEGER) + 1");
+ }
+ // convert the 1-based LOCATE result back to a 0-based index
+ buf.append(") - 1)");
+ }
+
+ /**
+ * Emit DB2's SUBSTR function for the query substring expression.
+ * The 0-based start is converted to SUBSTR's 1-based position; when
+ * an end is given, SUBSTR's third argument is a length, so
+ * (end - start) is emitted rather than the end position itself.
+ */
+ public void substring(SQLBuffer buf, FilterValue str, FilterValue start,
+ FilterValue end) {
+ // CASTs supply explicit types for otherwise-untyped expressions
+ buf.append("SUBSTR(CAST((");
+ str.appendTo(buf);
+ buf.append(") AS VARCHAR(1000)), CAST((");
+ start.appendTo(buf);
+ buf.append(") AS INTEGER) + 1");
+ if (end != null) {
+ // length argument: end position minus start position
+ buf.append(", CAST((");
+ end.appendTo(buf);
+ buf.append(") AS INTEGER) - CAST((");
+ start.appendTo(buf);
+ buf.append(") AS INTEGER)");
+ }
+ buf.append(")");
+ }
+}
Propchange: incubator/openjpa/trunk/openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/sql/AbstractDB2Dictionary.java
------------------------------------------------------------------------------
svn:executable = *