db-ojb-dev mailing list archives

From to...@apache.org
Subject cvs commit: db-ojb build.xml
Date Thu, 22 Jul 2004 07:48:53 GMT
tomdz       2004/07/22 00:48:53

  Modified:    src/java/org/apache/ojb/broker/util/dbhandling
                        CommonsSqlDatabaseHandling.java
                        DatabaseHandling.java DatabaseHandlingTask.java
                        TorqueDatabaseHandling.java
               lib      commons-sql-1.0-dev.jar
               .        build.xml
  Added:       src/java/org/apache/ojb/broker/util/dbhandling
                        CommonsSqlDataHandling.java PreparedModel.java
  Log:
  Added the ability to generate a DTD from the repository model, and to generate sql for, or directly insert, data that follows this DTD
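
  A minimal usage sketch (not part of this commit) of how the new data handling might be driven
  from Java code through the DatabaseHandling interface. It assumes a handling instance that is
  already fully configured (connection descriptor, metadata and schema files set); the file names
  are only placeholders:

      import java.io.FileReader;
      import java.io.FileWriter;
      import java.io.Writer;

      import org.apache.ojb.broker.util.dbhandling.DatabaseHandling;

      public class DataHandlingSketch
      {
          public static void generateDataArtifacts(DatabaseHandling handling) throws Exception
          {
              // write a DTD describing valid data XML documents for the current repository model
              Writer dtdWriter = new FileWriter("data.dtd");

              handling.getDataDTD(dtdWriter);
              dtdWriter.close();

              // turn a data XML file into insert statements without touching the database
              Writer sqlWriter = new FileWriter("insert-data.sql");

              handling.getInsertDataSql(new FileReader("ojbtest-data.xml"), sqlWriter);
              sqlWriter.close();

              // or insert the data directly via the configured connection
              handling.insertData(new FileReader("ojbtest-data.xml"));
          }
      }

  The same steps are exposed to Ant via the dbhandling task (see the build.xml change at the end
  of this commit).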
  
  Revision  Changes    Path
  1.3       +100 -27   db-ojb/src/java/org/apache/ojb/broker/util/dbhandling/CommonsSqlDatabaseHandling.java
  
  Index: CommonsSqlDatabaseHandling.java
  ===================================================================
  RCS file: /home/cvs/db-ojb/src/java/org/apache/ojb/broker/util/dbhandling/CommonsSqlDatabaseHandling.java,v
  retrieving revision 1.2
  retrieving revision 1.3
  diff -u -r1.2 -r1.3
  --- CommonsSqlDatabaseHandling.java	18 Jul 2004 21:51:05 -0000	1.2
  +++ CommonsSqlDatabaseHandling.java	22 Jul 2004 07:48:52 -0000	1.3
  @@ -16,10 +16,7 @@
    */
   
   import java.io.*;
  -import java.sql.Connection;
  -import java.util.ArrayList;
   import java.util.HashMap;
  -import java.util.Iterator;
   import java.util.StringTokenizer;
   
   import org.apache.commons.sql.builder.SqlBuilder;
  @@ -30,6 +27,7 @@
   import org.apache.commons.sql.model.Database;
   import org.apache.commons.sql.util.DDLExecutor;
   import org.apache.commons.sql.util.DataSourceWrapper;
  +import org.apache.ojb.broker.metadata.DescriptorRepository;
   import org.apache.ojb.broker.metadata.JdbcConnectionDescriptor;
   
   /**
  @@ -64,12 +62,14 @@
   
       /** The database connection descriptor */
       private JdbcConnectionDescriptor _jcd;
  +    /** The model */
  +    private DescriptorRepository _model;
       /** The datasource for commons-sql */
       private DataSourceWrapper _dataSource;
       /** The database sql builder */
       private SqlBuilder _builder;
  -    /** The database schemas */
  -    private ArrayList _schemas = new ArrayList();
  +    /** The database schema */
  +    private Database _schema = new Database();
       /** Performs SQL statements */
       private DDLExecutor _ddlExecutor;
   
  @@ -140,6 +140,22 @@
       }
   
       /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#getMetadata()
  +     */
  +    public DescriptorRepository getMetadata()
  +    {
  +        return _model;
  +    }
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#setMetadata(org.apache.ojb.broker.metadata.DescriptorRepository)
  +     */
  +    public void setMetadata(DescriptorRepository metaModel)
  +    {
  +        _model = metaModel;
  +    }
  +
  +    /* (non-Javadoc)
        * @see org.apache.ojb.broker.util.dbhandling.DBHandling#addSchemaFiles(java.lang.String, java.lang.String)
        */
       public void addSchemaFiles(String srcDir, String listOfFilenames) throws IOException, DatabaseHandlingException
  @@ -159,7 +175,7 @@
                   token = tokenizer.nextToken();
                   if (token.length() > 0)
                   {
  -                    _schemas.add(dbReader.parse(baseDir + token));
  +                    _schema.mergeWith((Database)dbReader.parse(baseDir + token));
                   }
               }
           }
  @@ -183,7 +199,7 @@
       {
           try
           {
  -            _schemas.add(new DatabaseReader().parse(reader));
  +            _schema.mergeWith((Database)new DatabaseReader().parse(reader));
           }
           catch (Exception ex)
           {
  @@ -196,6 +212,10 @@
                   throw new DatabaseHandlingException(ex);
               }
           }
  +        finally
  +        {
  +            reader.close();
  +        }
       }
   
       /* (non-Javadoc)
  @@ -205,10 +225,7 @@
       {
           try
           {
  -            for (Iterator it = _schemas.iterator(); it.hasNext();)
  -            {
  -                _ddlExecutor.createDatabase((Database)it.next(), true);
  -            }
  +            _ddlExecutor.createDatabase(_schema, true);
           }
           catch (Exception ex)
           {
  @@ -225,7 +242,7 @@
   
           try
           {
  -            getAlterationSql(writer);
  +            getAlterDatabaseSql(writer);
               _ddlExecutor.evaluateBatch(writer.getBuffer().toString());
           }
           catch (Exception ex)
  @@ -271,7 +288,7 @@
       /* (non-Javadoc)
        * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#getDropSql(java.io.Writer)
        */
  -    public void getDropSql(Writer writer) throws DatabaseHandlingException
  +    public void getDropDatabaseSql(Writer writer) throws DatabaseHandlingException
       {
           _builder.setWriter(writer);
           try
  @@ -289,18 +306,12 @@
       /* (non-Javadoc)
        * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#getAlterationSql(java.io.Writer)
        */
  -    public void getAlterationSql(Writer writer) throws DatabaseHandlingException
  +    public void getAlterDatabaseSql(Writer writer) throws DatabaseHandlingException
       {
           _builder.setWriter(writer);
           try
           {
  -            Connection conn = _dataSource.getConnection();
  -
  -            // TODO: Does this work ?
  -            for (Iterator it = _schemas.iterator(); it.hasNext();)
  -            {
  -                _builder.alterDatabase((Database)it.next(), conn, true, true);
  -            }
  +            _builder.alterDatabase(_schema, _dataSource.getConnection(), true, true);
           }
           catch (Exception ex)
           {
  @@ -311,20 +322,82 @@
       /* (non-Javadoc)
        * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#getCreationSql(java.io.Writer)
        */
  -    public void getCreationSql(Writer writer) throws DatabaseHandlingException
  +    public void getCreateDatabaseSql(Writer writer) throws DatabaseHandlingException
       {
           _builder.setWriter(writer);
           try
           {
  -            // TODO: Does this work ?
  -            for (Iterator it = _schemas.iterator(); it.hasNext();)
  -            {
  -                _builder.createDatabase((Database)it.next(), true);
  -            }
  +            _builder.createDatabase(_schema, true);
           }
           catch (Exception ex)
           {
               throw new DatabaseHandlingException(ex);
           }
  +    }
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#addDataFile(java.io.Reader)
  +     */
  +    public void insertData(Reader reader) throws IOException, DatabaseHandlingException
  +    {
  +        new CommonsSqlDataHandling(_schema, _model).insertData(reader, _ddlExecutor);
  +    }
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#addDataFiles(java.lang.String, java.lang.String)
  +     */
  +    public void insertData(String srcDir, String listOfFilenames) throws IOException, DatabaseHandlingException
  +    {
  +        CommonsSqlDataHandling dataHandling = new CommonsSqlDataHandling(_schema, _model);
  +        StringTokenizer        tokenizer    = new StringTokenizer(listOfFilenames, ",");
  +        String                 baseDir      = ((srcDir != null) && (srcDir.length() > 0) ? srcDir : ".") + "/";
  +        String                 token;
  +        String                 sql;
  +
  +        while (tokenizer.hasMoreTokens())
  +        {
  +            token = tokenizer.nextToken();
  +            if (token.length() > 0)
  +            {
  +                dataHandling.insertData(new FileReader(baseDir + token), _ddlExecutor);
  +            }
  +        }
  +    }
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#getInsertDataSql(java.io.Reader, java.io.Writer)
  +     */
  +    public void getInsertDataSql(Reader reader, Writer writer) throws DatabaseHandlingException
  +    {
  +        new CommonsSqlDataHandling(_schema, _model).getInsertDataSql(reader, writer);
  +    }
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#getInsertDataSql(java.lang.String, java.lang.String, java.io.Writer)
  +     */
  +    public void getInsertDataSql(String srcDir, String listOfFilenames, Writer writer) throws IOException, DatabaseHandlingException
  +    {
  +        CommonsSqlDataHandling dataHandling = new CommonsSqlDataHandling(_schema, _model);
  +        StringTokenizer        tokenizer    = new StringTokenizer(listOfFilenames, ",");
  +        String                 baseDir      = ((srcDir != null) && (srcDir.length() > 0) ? srcDir : ".") + "/";
  +        String                 token;
  +        String                 sql;
  +
  +        while (tokenizer.hasMoreTokens())
  +        {
  +            token = tokenizer.nextToken();
  +            if (token.length() > 0)
  +            {
  +                dataHandling.getInsertDataSql(new FileReader(baseDir + token), writer);
  +            }
  +        }
  +    }
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#getDataDTD(java.io.Writer)
  +     */
  +    public void getDataDTD(Writer writer) throws DatabaseHandlingException
  +    {
  +        new CommonsSqlDataHandling(_schema, _model).getDataDTD(writer);
       }
   }
  
  
  
  1.2       +85 -17    db-ojb/src/java/org/apache/ojb/broker/util/dbhandling/DatabaseHandling.java
  
  Index: DatabaseHandling.java
  ===================================================================
  RCS file: /home/cvs/db-ojb/src/java/org/apache/ojb/broker/util/dbhandling/DatabaseHandling.java,v
  retrieving revision 1.1
  retrieving revision 1.2
  diff -u -r1.1 -r1.2
  --- DatabaseHandling.java	4 Jul 2004 21:45:51 -0000	1.1
  +++ DatabaseHandling.java	22 Jul 2004 07:48:52 -0000	1.2
  @@ -17,6 +17,7 @@
   
   import java.io.*;
   
  +import org.apache.ojb.broker.metadata.DescriptorRepository;
   import org.apache.ojb.broker.metadata.JdbcConnectionDescriptor;
   
   /**
  @@ -50,6 +51,20 @@
       public JdbcConnectionDescriptor getConnection();
   
       /**
  +     * Sets the metamodel to use for the data and dump methods.
  +     * 
  +     * @param metaModel The metamodel
  +     */
  +    public void setMetadata(DescriptorRepository metaModel);
  +
  +    /**
  +     * Returns the metamodel used for the data and dump methods.
  +     * 
  +     * @return The metamodel
  +     */
  +    public DescriptorRepository getMetadata();
  +    
  +    /**
        * Adds db definition files to use.
        * 
        * @param srcDir          The directory containing the files
  @@ -58,7 +73,8 @@
       public void addSchemaFiles(String srcDir, String listOfFilenames) throws IOException, DatabaseHandlingException;
   
       /**
  -     * Adds an input stream containg part of the db definition to use.
   +     * Adds an input stream containing part of the db definition to use. Note that you should not use
   +     * the reader after passing it to this method; in particular, do not close it (this is done automatically).
        * 
        * @param reader A reader returning the schema
        */
  @@ -73,23 +89,23 @@
       public void createDatabase() throws DatabaseHandlingException;
   
       /**
  -     * Generates SQL for dropping the database, and writes the SQL to the given
  -     * output stream. Note that this does not modify the database.
  +     * Generates SQL for creating a new database according to the schema, and
  +     * writes the SQL to the given output stream. Note that this does not
  +     * access the database in any way.
        * 
        * @param writer The writer to write the schema to
        * @throws DatabaseHandlingException If some error occurred
        */
  -    public void getDropSql(Writer writer) throws DatabaseHandlingException;
  -    
  +    public void getCreateDatabaseSql(Writer writer) throws DatabaseHandlingException;
  +
       /**
  -     * Generates SQL for creating a new database according to the schema, and
  -     * writes the SQL to the given output stream. Note that this does not
  -     * access the database in any way.
   +     * Alters an existing database. If it does not exist yet, then it is
   +     * created; otherwise the data in it is preserved as much as possible (depending
   +     * on the structural changes).
        * 
  -     * @param writer The writer to write the schema to
        * @throws DatabaseHandlingException If some error occurred
        */
  -    public void getCreationSql(Writer writer) throws DatabaseHandlingException;
  +    public void alterDatabase() throws DatabaseHandlingException;
   
       /**
        * Generates SQL for altering a new database according to the schema, and
  @@ -99,23 +115,75 @@
        * @param writer The writer to write the schema to
        * @throws DatabaseHandlingException If some error occurred
        */
  -    public void getAlterationSql(Writer writer) throws DatabaseHandlingException;
  +    public void getAlterDatabaseSql(Writer writer) throws DatabaseHandlingException;
   
       /**
  -     * Alters an existing database. If it already does not exist yet, then it's
  -     * created, else the data in it is maintained as much as possible (depending
  -     * on the structural changes).
  +     * Drops the database.
        * 
        * @throws DatabaseHandlingException If some error occurred
        */
  -    public void alterDatabase() throws DatabaseHandlingException;
  +    public void dropDatabase() throws DatabaseHandlingException;
   
       /**
  -     * Drops the database.
  +     * Generates SQL for dropping the database, and writes the SQL to the given
  +     * output stream. Note that this does not modify the database.
        * 
  +     * @param writer The writer to write the schema to
        * @throws DatabaseHandlingException If some error occurred
        */
  -    public void dropDatabase() throws DatabaseHandlingException;
  +    public void getDropDatabaseSql(Writer writer) throws DatabaseHandlingException;
  +
  +    /**
  +     * Adds the data contained in the specified data files. Note that the data is expected to match the repository
  +     * metadata (not the table schema).
  +     * 
  +     * @param srcDir          The directory containing the files
  +     * @param listOfFilenames The filenames in a comma-separated list
  +     */
  +    public void insertData(String srcDir, String listOfFilenames) throws IOException, DatabaseHandlingException;
  +
  +    /**
  +     * Adds the data contained in the given input stream.  Note that the data is
  +     * expected to match the repository metadata (not the table schema).
  +     * 
  +     * @param reader A reader returning the content of the data file
  +     */
  +    public void insertData(Reader reader) throws IOException, DatabaseHandlingException;
  +
  +    /**
  +     * Generates SQL for inserting the data contained in the specified files, and
  +     * writes it to the given output stream. Note that this method does not access
  +     * the database.
  +     * 
  +     * @param srcDir          The directory containing the files
  +     * @param listOfFilenames The filenames in a comma-separated list
  +     * @param writer          The writer to write the SQL to
  +     * @throws DatabaseHandlingException If some part of the data files does not
  +     *                                   match the metamodel, or no metamodel was set
  +     */
  +    public void getInsertDataSql(String srcDir, String listOfFilenames, Writer writer) throws IOException, DatabaseHandlingException;
  +
  +    /**
  +     * Generates SQL for inserting the data contained in the given input stream, and
  +     * writes it to the given output stream. Note that this method does not access
  +     * the database.
  +     * 
  +     * @param reader A reader returning the content of the data file
  +     * @param writer The writer to write the SQL to
  +     * @throws DatabaseHandlingException If some part of the data files does not
  +     *                                   match the metamodel, or no metamodel was set
  +     */
  +    public void getInsertDataSql(Reader reader, Writer writer) throws DatabaseHandlingException;
  +
  +    /**
  +     * Generates the DTD for data XML files that can be inserted into the database via the
  +     * {@link #insertData(Reader)} and {@link #insertData(String, String)} methods. Note that
  +     * this method does not access the database.
  +     * 
  +     * @param writer The writer to write the DTD to
  +     * @throws DatabaseHandlingException If no metamodel was set
  +     */
  +    public void getDataDTD(Writer writer) throws DatabaseHandlingException;
   
       /**
        * Dumps the database schema to the given stream.
  
  
  
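   The directory/file-list variants of the new data methods mirror addSchemaFiles(): the file
   names are passed as a single comma-separated string, relative to the given source directory.
   A hypothetical sketch (the directory and the second file name are made up; ojbtest-data.xml is
   the file referenced in build.xml below):

       import java.io.FileWriter;
       import java.io.Writer;

       import org.apache.ojb.broker.util.dbhandling.DatabaseHandling;

       public class InsertDataFilesSketch
       {
           public static void insertDataFiles(DatabaseHandling handling) throws Exception
           {
               // insert the data from two files located in the given directory
               handling.insertData("build/test", "ojbtest-data.xml,extra-data.xml");

               // or only generate the corresponding insert statements
               Writer sql = new FileWriter("insert-data.sql");

               handling.getInsertDataSql("build/test", "ojbtest-data.xml,extra-data.xml", sql);
               sql.close();
           }
       }
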
  1.2       +161 -8    db-ojb/src/java/org/apache/ojb/broker/util/dbhandling/DatabaseHandlingTask.java
  
  Index: DatabaseHandlingTask.java
  ===================================================================
  RCS file: /home/cvs/db-ojb/src/java/org/apache/ojb/broker/util/dbhandling/DatabaseHandlingTask.java,v
  retrieving revision 1.1
  retrieving revision 1.2
  diff -u -r1.1 -r1.2
  --- DatabaseHandlingTask.java	4 Jul 2004 21:45:51 -0000	1.1
  +++ DatabaseHandlingTask.java	22 Jul 2004 07:48:52 -0000	1.2
  @@ -72,6 +72,47 @@
       }
   
       /**
  +     * Data insertion action.
  +     */
  +    public class InsertDataAction implements DatabaseAction
  +    {
  +        private ArrayList _dataFileSets = new ArrayList();
  +
  +        public void add(FileSet files)
  +        {
  +            _dataFileSets.add(files);
  +        }
  +
  +        public void execute(DatabaseHandling handler) throws DatabaseHandlingException
  +        {
  +            for (Iterator it = _dataFileSets.iterator(); it.hasNext();)
  +            {
  +                FileSet          dataFileSet = (FileSet)it.next();
  +                DirectoryScanner scanner     = dataFileSet.getDirectoryScanner(getProject());
  +                String[]         files       = scanner.getIncludedFiles();
  +                StringBuffer     includes    = new StringBuffer();
  +
  +                for (int idx = 0; idx < files.length; idx++)
  +                {
  +                    if (idx > 0)
  +                    {
  +                        includes.append(",");
  +                    }
  +                    includes.append(files[idx]);
  +                }
  +                try
  +                {
  +                    handler.insertData(dataFileSet.getDir(getProject()).getAbsolutePath(), includes.toString());
  +                }
  +                catch (Exception ex)
  +                {
  +                    throw new BuildException(ex);
  +                }
  +            }
  +        }
  +    }
  +
  +    /**
        * Database drop action.
        */
       public class DropAction implements DatabaseAction
  @@ -160,7 +201,7 @@
       /**
        * Action for writing the creation sql to a file.
        */
  -    public class GenerateCreationSqlAction extends GenerateSqlActionBase
  +    public class GenerateCreateSqlAction extends GenerateSqlActionBase
       {
           public void execute(DatabaseHandling handler) throws DatabaseHandlingException
           {
  @@ -168,7 +209,7 @@
               {
                   Writer writer = getWriter();
       
  -                handler.getCreationSql(writer);
  +                handler.getCreateDatabaseSql(writer);
                   writer.close();
               }
               catch (IOException ex)
  @@ -181,7 +222,28 @@
       /**
        * Action for writing the alteration sql to a file.
        */
  -    public class GenerateAlterationSqlAction extends GenerateSqlActionBase
  +    public class GenerateAlterSqlAction extends GenerateSqlActionBase
  +    {
  +        public void execute(DatabaseHandling handler) throws DatabaseHandlingException
  +        {
  +            try
  +            {
  +                Writer writer = getWriter();
  +    
  +                handler.getAlterDatabaseSql(writer);
  +                writer.close();
  +            }
  +            catch (IOException ex)
  +            {
  +                throw new BuildException(ex);
  +            }
  +        }
  +    }
  +
  +    /**
  +     * Action for writing the data DTD to a file.
  +     */
  +    public class GenerateDataDTDAction extends GenerateSqlActionBase
       {
           public void execute(DatabaseHandling handler) throws DatabaseHandlingException
           {
  @@ -189,7 +251,58 @@
               {
                   Writer writer = getWriter();
       
  -                handler.getAlterationSql(writer);
  +                handler.getDataDTD(writer);
  +                writer.close();
  +            }
  +            catch (IOException ex)
  +            {
  +                throw new BuildException(ex);
  +            }
  +        }
  +    }
  +
  +    /**
   +     * Action for writing data insert sql to a file.
  +     */
  +    public class GenerateInsertDataSqlAction extends GenerateSqlActionBase
  +    {
  +        private ArrayList _dataFileSets = new ArrayList();
  +
  +        public void add(FileSet files)
  +        {
  +            _dataFileSets.add(files);
  +        }
  +
  +        public void execute(DatabaseHandling handler) throws DatabaseHandlingException
  +        {
  +            Writer writer = getWriter();
  +
  +            for (Iterator it = _dataFileSets.iterator(); it.hasNext();)
  +            {
  +                FileSet          dataFileSet = (FileSet)it.next();
  +                DirectoryScanner scanner     = dataFileSet.getDirectoryScanner(getProject());
  +                String[]         files       = scanner.getIncludedFiles();
  +                StringBuffer     includes    = new StringBuffer();
  +
  +                for (int idx = 0; idx < files.length; idx++)
  +                {
  +                    if (idx > 0)
  +                    {
  +                        includes.append(",");
  +                    }
  +                    includes.append(files[idx]);
  +                }
  +                try
  +                {
  +                    handler.getInsertDataSql(dataFileSet.getDir(getProject()).getAbsolutePath(), includes.toString(), writer);
  +                }
  +                catch (Exception ex)
  +                {
  +                    throw new BuildException(ex);
  +                }
  +            }
  +            try
  +            {
                   writer.close();
               }
               catch (IOException ex)
  @@ -210,7 +323,7 @@
               {
                   Writer writer = getWriter();
       
  -                handler.getDropSql(writer);
  +                handler.getDropDatabaseSql(writer);
                   writer.close();
               }
               catch (IOException ex)
  @@ -341,6 +454,19 @@
       }
   
       /**
  +     * Creates a new insert data action object for Ant.
  +     * 
  +     * @return The action object
  +     */
  +    public DatabaseAction createInsertData()
  +    {
  +        DatabaseAction action = new InsertDataAction();
  +
  +        _actions.add(action);
  +        return action;
  +    }
  +
  +    /**
        * Creates a new dump action object for Ant.
        * 
        * @return The action object
  @@ -360,7 +486,7 @@
        */
       public DatabaseAction createCreateSql()
       {
  -        DatabaseAction action = new GenerateCreationSqlAction();
  +        DatabaseAction action = new GenerateCreateSqlAction();
   
           _actions.add(action);
           return action;
  @@ -373,7 +499,33 @@
        */
       public DatabaseAction createAlterSql()
       {
  -        DatabaseAction action = new GenerateAlterationSqlAction();
  +        DatabaseAction action = new GenerateAlterSqlAction();
  +
  +        _actions.add(action);
  +        return action;
  +    }
  +
  +    /**
  +     * Creates a new createDataDTD action object for Ant.
  +     * 
  +     * @return The action object
  +     */
  +    public DatabaseAction createCreateDataDTD()
  +    {
  +        DatabaseAction action = new GenerateDataDTDAction();
  +
  +        _actions.add(action);
  +        return action;
  +    }
  +
  +    /**
  +     * Creates a new insertDataSql action object for Ant.
  +     * 
  +     * @return The action object
  +     */
  +    public DatabaseAction createInsertDataSql()
  +    {
  +        DatabaseAction action = new GenerateInsertDataSqlAction();
   
           _actions.add(action);
           return action;
  @@ -435,7 +587,8 @@
                   }
               }
               handling.setConnection(connRep.getDescriptor(pbKey));
  -
  +            handling.setMetadata(MetadataManager.getInstance().getGlobalRepository());
  +            
               for (Iterator it = _actions.iterator(); it.hasNext();)
               {
                   ((DatabaseAction)it.next()).execute(handling);
  
  
  
  1.2       +78 -9     db-ojb/src/java/org/apache/ojb/broker/util/dbhandling/TorqueDatabaseHandling.java
  
  Index: TorqueDatabaseHandling.java
  ===================================================================
  RCS file: /home/cvs/db-ojb/src/java/org/apache/ojb/broker/util/dbhandling/TorqueDatabaseHandling.java,v
  retrieving revision 1.1
  retrieving revision 1.2
  diff -u -r1.1 -r1.2
  --- TorqueDatabaseHandling.java	4 Jul 2004 21:45:51 -0000	1.1
  +++ TorqueDatabaseHandling.java	22 Jul 2004 07:48:52 -0000	1.2
  @@ -22,6 +22,7 @@
   import java.util.zip.GZIPInputStream;
   import java.util.zip.GZIPOutputStream;
   
  +import org.apache.ojb.broker.metadata.DescriptorRepository;
   import org.apache.ojb.broker.metadata.JdbcConnectionDescriptor;
   import org.apache.ojb.broker.util.logging.LoggerFactory;
   import org.apache.tools.ant.Project;
  @@ -126,6 +127,23 @@
           return _jcd;
       }
   
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#getMetadata()
  +     */
  +    public DescriptorRepository getMetadata()
  +    {
   +        throw new UnsupportedOperationException("Metadata handling is currently not supported");
  +    }
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#setMetadata(org.apache.ojb.broker.metadata.DescriptorRepository)
  +     */
  +    public void setMetadata(DescriptorRepository metaModel)
  +    {
   +        throw new UnsupportedOperationException("Metadata handling is currently not supported");
  +    }
  +
       /**
        * Returns the torque database platform used.
        * 
  @@ -166,8 +184,31 @@
        */
       public void addSchemaFile(Reader reader) throws IOException
       {
  -        _torqueSchemata.put("schema"+_torqueSchemata.size()+".xml",
  -                            readStreamCompressed(reader));
  +        try
  +        {
  +            _torqueSchemata.put("schema"+_torqueSchemata.size()+".xml",
  +                                readStreamCompressed(reader));
  +        }
  +        finally
  +        {
  +            reader.close();
  +        }
  +    }
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#addDataFile(java.io.Reader)
  +     */
  +    public void insertData(Reader reader) throws IOException, DatabaseHandlingException
  +    {
  +        throw new UnsupportedOperationException("Data files are currently not supported");
  +    }
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#addDataFiles(java.lang.String, java.lang.String)
  +     */
  +    public void insertData(String srcDir, String listOfFilenames) throws IOException, DatabaseHandlingException
  +    {
  +        throw new UnsupportedOperationException("Data files are currently not supported");
       }
   
       /**
  @@ -710,20 +751,17 @@
           throw new UnsupportedOperationException("Not yet implemented");
       }
   
  -    public void dumpDatabaseSchema(Writer writer)
  -            throws DatabaseHandlingException
  +    public void dumpDatabaseSchema(Writer writer) throws DatabaseHandlingException
       {
           throw new UnsupportedOperationException("Not yet implemented");
       }
   
  -    public void getAlterationSql(Writer writer)
  -            throws DatabaseHandlingException
   +    public void getAlterDatabaseSql(Writer writer) throws DatabaseHandlingException
       {
           throw new UnsupportedOperationException("Not yet implemented");
       }
   
  -    public void getCreationSql(Writer writer)
  -            throws DatabaseHandlingException
  +    public void getCreateDatabaseSql(Writer writer) throws DatabaseHandlingException
       {
           throw new UnsupportedOperationException("Not yet implemented");
       }
  @@ -731,8 +769,39 @@
       /* (non-Javadoc)
        * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#getDropSql(java.io.Writer)
        */
  -    public void getDropSql(Writer writer) throws DatabaseHandlingException
  +    public void getDropDatabaseSql(Writer writer) throws DatabaseHandlingException
       {
           throw new UnsupportedOperationException("Not yet implemented");
  +    }
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#insertDataSql(java.io.Writer)
  +     */
  +    public void getInsertDataSql(Writer writer) throws DatabaseHandlingException
  +    {
  +        throw new UnsupportedOperationException("Data files are not supported yet");
  +    }
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#getInsertDataSql(java.io.Reader, java.io.Writer)
  +     */
  +    public void getInsertDataSql(Reader reader, Writer writer) throws DatabaseHandlingException
  +    {
  +        throw new UnsupportedOperationException("Data files are not supported yet");
  +    }
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#getInsertDataSql(java.lang.String, java.lang.String, java.io.Writer)
  +     */
  +    public void getInsertDataSql(String srcDir, String listOfFilenames, Writer writer) throws IOException, DatabaseHandlingException
  +    {
  +        throw new UnsupportedOperationException("Data files are not supported yet");
  +    }
  +
  +    /* (non-Javadoc)
  +     * @see org.apache.ojb.broker.util.dbhandling.DatabaseHandling#getDataDTD(java.io.Writer)
  +     */
  +    public void getDataDTD(Writer writer) throws DatabaseHandlingException
  +    {
  +        throw new UnsupportedOperationException("Data files are not supported yet");
       }
   }
  
  
  
  1.1                  db-ojb/src/java/org/apache/ojb/broker/util/dbhandling/CommonsSqlDataHandling.java
  
  Index: CommonsSqlDataHandling.java
  ===================================================================
  package org.apache.ojb.broker.util.dbhandling;
  
  /* Copyright 2004-2004 The Apache Software Foundation
   *
   * Licensed under the Apache License, Version 2.0 (the "License");
   * you may not use this file except in compliance with the License.
   * You may obtain a copy of the License at
   *
   *     http://www.apache.org/licenses/LICENSE-2.0
   *
   * Unless required by applicable law or agreed to in writing, software
   * distributed under the License is distributed on an "AS IS" BASIS,
   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   * See the License for the specific language governing permissions and
   * limitations under the License.
   */
  
  import java.io.IOException;
  import java.io.Reader;
  import java.io.StringReader;
  import java.io.StringWriter;
  import java.io.Writer;
  import java.util.ArrayList;
  import java.util.Iterator;
  
  import org.apache.commons.beanutils.DynaBean;
  import org.apache.commons.digester.Digester;
  import org.apache.commons.digester.ExtendedBaseRules;
  import org.apache.commons.digester.Rule;
  import org.apache.commons.digester.RuleSetBase;
  import org.apache.commons.sql.dynabean.DynaSql;
  import org.apache.commons.sql.model.Column;
  import org.apache.commons.sql.model.Database;
  import org.apache.commons.sql.model.Table;
  import org.apache.commons.sql.util.DDLExecutor;
  import org.apache.ojb.broker.metadata.DescriptorRepository;
  import org.xml.sax.Attributes;
  import org.xml.sax.EntityResolver;
  import org.xml.sax.InputSource;
  import org.xml.sax.SAXException;
  
  /**
   * Provides data input and output via commons-sql.
   * 
   * @author Thomas Dudziak
   */
  public class CommonsSqlDataHandling
  {
      public static class DataSet
      {
          private ArrayList _beans = new ArrayList();
  
          public void add(DynaBean bean)
          {
              _beans.add(bean);
          }
  
          public void createInsertionSql(Database db, Writer writer) throws IOException
          {
              DynaSql      dynaSql = new DynaSql();
              StringBuffer result  = new StringBuffer();
  
              dynaSql.setDatabase(db);
              for (Iterator it = _beans.iterator(); it.hasNext();)
              {
                  writer.write(dynaSql.getInsertSql((DynaBean)it.next()));
                  if (it.hasNext())
                  {
                      writer.write("\n");
                  }
              }
          }
      }
  
      private class DynaFactoryCreateRule extends Rule
      {
          private DynaSql _dynaSql;
  
          public DynaFactoryCreateRule()
          {
              _dynaSql = new DynaSql();
              _dynaSql.setDatabase(CommonsSqlDataHandling.this._db);
          }
  
          /* (non-Javadoc)
           * @see org.apache.commons.digester.Rule#begin(java.lang.String, java.lang.String, org.xml.sax.Attributes)
           */
          public void begin(String namespace, String name, Attributes attributes) throws Exception
          {
              Table table = _preparedModel.getTableFor(name);
  
              if (table == null)
              {
                  throw new DatabaseHandlingException("The element "+name+" does not map to a table in the database schema");
              }
  
              DynaBean bean = _dynaSql.newInstance(table.getName());
  
              for (int idx = 0; idx < attributes.getLength(); idx++)
              {
                  String attrName  = attributes.getLocalName(idx);
                  String attrValue = attributes.getValue(idx);
                  Column column    = _preparedModel.getColumnFor(name, attrName);
  
                  if (column == null)
                  {
                      throw new DatabaseHandlingException("The attribute "+attrName+" of element "+name+" does not map to a column in table "+table.getName()+" in the database schema");
                  }
                  bean.set(column.getName(), attrValue);
              }
              CommonsSqlDataHandling.this._digester.push(bean);
          }
  
          /* (non-Javadoc)
           * @see org.apache.commons.digester.Rule#end(java.lang.String, java.lang.String)
           */
          public void end(String namespace, String name) throws Exception
          {
              DynaBean bean = (DynaBean)CommonsSqlDataHandling.this._digester.pop();
  
              ((DataSet)CommonsSqlDataHandling.this._digester.peek()).add(bean);
          }
      }
  
      public class DataRuleSet extends RuleSetBase
      {
          /* (non-Javadoc)
           * @see org.apache.commons.digester.RuleSet#addRuleInstances(org.apache.commons.digester.Digester)
           */
          public void addRuleInstances(Digester digester)
          {
              digester.addObjectCreate("dataset", DataSet.class);
              digester.addRule("*/dataset/*", new DynaFactoryCreateRule());
          }
      }
  
      /** The database model */
      private Database _db;
      /** The model */
      private DescriptorRepository _model;
      /** The prepared model */
      private PreparedModel _preparedModel;
      /** The digester for parsing the XML */
      private Digester _digester;
  
      /**
       * Creates a new data handling object.
       * 
       * @param db    The database model
       * @param model The model
       */
      public CommonsSqlDataHandling(Database db, DescriptorRepository model)
      {
          _db            = db;
          _model         = model;
          _preparedModel = new PreparedModel(model, db);
          _digester      = new Digester();
          _digester.setEntityResolver(new EntityResolver() {
              public InputSource resolveEntity(String publicId, String systemId) throws SAXException
              {
                  // we don't care about the DTD for data files
                  return new InputSource(new StringReader(""));
              }
  
          });
          _digester.setNamespaceAware(true);
          _digester.setValidating(false);
          _digester.setUseContextClassLoader(true);
          _digester.setRules(new ExtendedBaseRules());
          _digester.addRuleSet(new DataRuleSet());
      }
  
      /**
        * Generates the sql necessary to insert the data XML contained in the given input stream,
        * and writes it to the given output writer. Note that the data is expected to match the
        * repository metadata (not the table schema). Also note that the reader should not be used
        * after passing it to this method, except for closing it (which is not done automatically).
       * 
       * @param input  A reader returning the content of the data file
       * @param output The writer to write the sql to
       */
      public void getInsertDataSql(Reader input, Writer output) throws DatabaseHandlingException
      {
          try
          {
              DataSet set = (DataSet)_digester.parse(input);
  
              set.createInsertionSql(_db, output);
          }
          catch (Exception ex)
          {
              if (ex instanceof DatabaseHandlingException)
              {
                  // is not declared by digester, but may be thrown
                  throw (DatabaseHandlingException)ex;
              }
              else
              {
                  throw new DatabaseHandlingException(ex);
              }
          }
      }
  
      /**
       * Writes a DTD that can be used for data XML files matching the current model to the given writer. 
       * 
       * @param output The writer to write the DTD to
       */
      public void getDataDTD(Writer output) throws DatabaseHandlingException
      {
          try
          {
              output.write("<!ELEMENT dataset (\n");
              for (Iterator it = _preparedModel.getElementNames(); it.hasNext();)
              {
                  String elementName = (String)it.next();
  
                  output.write("    ");
                  output.write(elementName);
                  output.write("*");
                  output.write(it.hasNext() ? " |\n" : "\n");
              }
              output.write(")>\n<!ATTLIST dataset\n    name CDATA #REQUIRED\n>\n");
              for (Iterator it = _preparedModel.getElementNames(); it.hasNext();)
              {
                  String elementName = (String)it.next();
  
                  output.write("\n<!ELEMENT ");
                  output.write(elementName);
                  output.write(" EMPTY>\n<!ATTLIST ");
                  output.write(elementName);
                  output.write("\n");
  
                  for (Iterator attrIt = _preparedModel.getAttributeNames(elementName); attrIt.hasNext();)
                  {
                      String attrName = (String)attrIt.next();
  
                      output.write("    ");
                      output.write(attrName);
                      output.write(" CDATA #");
                      output.write(_preparedModel.isRequired(elementName, attrName) ? "REQUIRED" : "IMPLIED");
                      output.write("\n");
                  }
                  output.write(">\n");
              }
          }
          catch (IOException ex)
          {
              throw new DatabaseHandlingException(ex);
          }
      }
  
      /**
        * Inserts the data XML contained in the given input stream into the database, using the
        * given executor to perform the generated sql. Note that the data is expected to match the
        * repository metadata (not the table schema). Also note that the reader should not be used
        * after passing it to this method, except for closing it (which is not done automatically).
        * 
        * @param input    A reader returning the content of the data file
        * @param executor The executor used to perform the generated insert statements
       */
      public void insertData(Reader input, DDLExecutor executor) throws DatabaseHandlingException
      {
          try
          {
              DataSet      set    = (DataSet)_digester.parse(input);
              StringWriter writer = new StringWriter();
  
              set.createInsertionSql(_db, writer);
              executor.evaluateBatch(writer.getBuffer().toString());
          }
          catch (Exception ex)
          {
              if (ex instanceof DatabaseHandlingException)
              {
                  // is not declared by digester, but may be thrown
                  throw (DatabaseHandlingException)ex;
              }
              else
              {
                  throw new DatabaseHandlingException(ex);
              }
          }
      }
  }
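
   A hypothetical sketch (not from the commit) of using CommonsSqlDataHandling directly: given a
   commons-sql Database schema and the OJB DescriptorRepository, it turns a data XML file into
   insert statements without accessing the database. The helper method and file name are made up:

       import java.io.FileReader;
       import java.io.StringWriter;

       import org.apache.commons.sql.model.Database;
       import org.apache.ojb.broker.metadata.DescriptorRepository;
       import org.apache.ojb.broker.util.dbhandling.CommonsSqlDataHandling;

       public class DirectDataHandlingSketch
       {
           public static String toInsertSql(Database schema, DescriptorRepository model, String dataFile)
               throws Exception
           {
               CommonsSqlDataHandling dataHandling = new CommonsSqlDataHandling(schema, model);
               StringWriter           sql          = new StringWriter();

               // elements in the data XML map to persistent classes, attributes to their fields
               dataHandling.getInsertDataSql(new FileReader(dataFile), sql);
               return sql.toString();
           }
       }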
  
  
  
  1.1                  db-ojb/src/java/org/apache/ojb/broker/util/dbhandling/PreparedModel.java
  
  Index: PreparedModel.java
  ===================================================================
  package org.apache.ojb.broker.util.dbhandling;
  
  /* Copyright 2004-2004 The Apache Software Foundation
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
  
  import java.util.HashMap;
  import java.util.Iterator;
  import java.util.TreeMap;
  
  import org.apache.commons.sql.model.Column;
  import org.apache.commons.sql.model.Database;
  import org.apache.commons.sql.model.Table;
  import org.apache.ojb.broker.metadata.ClassDescriptor;
  import org.apache.ojb.broker.metadata.CollectionDescriptor;
  import org.apache.ojb.broker.metadata.DescriptorRepository;
  import org.apache.ojb.broker.metadata.FieldDescriptor;
  
  /**
   * Provides a model derived from {@link org.apache.ojb.broker.metadata.DescriptorRepository} that
   * is preprocessed for data handling (inserting data, generating data dtd).
   * 
   * @author Thomas Dudziak
   */
  public class PreparedModel
  {
      private TreeMap _elementToTable                = new TreeMap();
      private HashMap _elementToColumnMap            = new HashMap();
      private HashMap _elementToRequiredColumnsMap   = new HashMap();
      private HashMap _indirectionTableToElementName = new HashMap();
  
      public PreparedModel(DescriptorRepository model, Database schema)
      {
          prepareModel(model, schema);
      }
  
      public Iterator getElementNames()
      {
          return _elementToTable.keySet().iterator();
      }
  
      public Iterator getAttributeNames(String elementName)
      {
          TreeMap columns = (TreeMap)_elementToColumnMap.get(elementName);
  
          return columns == null ? null : columns.keySet().iterator();
      }
  
      public boolean isRequired(String elementName, String attributeName)
      {
          HashMap requiredColumns = (HashMap)_elementToRequiredColumnsMap.get(elementName);
  
          return requiredColumns == null ? false : requiredColumns.containsKey(attributeName);
      }
      
      public Table getTableFor(String elementName)
      {
          return (Table)_elementToTable.get(elementName);
      }
  
      public Column getColumnFor(String elementName, String attrName)
      {
          TreeMap columns = (TreeMap)_elementToColumnMap.get(elementName);
  
          return columns == null ? null : (Column)columns.get(attrName);
      }
  
      /**
        * Prepares a representation of the model that is more easily accessible for our purposes.
        * 
        * @param model  The original model
        * @param schema The database schema
       */
      private void prepareModel(DescriptorRepository model, Database schema)
      {
          TreeMap result = new TreeMap();
  
          for (Iterator it = model.getDescriptorTable().values().iterator(); it.hasNext();)
          {
              ClassDescriptor classDesc          = (ClassDescriptor)it.next();
              String          elementName        = getElementName(classDesc.getClassNameOfObject());
              Table           mappedTable        = (Table)_elementToTable.get(elementName);
              TreeMap         columnsMap         = (TreeMap)_elementToColumnMap.get(elementName);
              HashMap         requiredColumnsMap = (HashMap)_elementToRequiredColumnsMap.get(elementName);
  
              if (mappedTable == null)
              {
                  mappedTable = schema.findTable(classDesc.getFullTableName());
                  if (mappedTable == null)
                  {
                      continue;
                  }
                  columnsMap         = new TreeMap();
                  requiredColumnsMap = new HashMap();
                  _elementToTable.put(elementName, mappedTable);
                  _elementToColumnMap.put(elementName, columnsMap);
                  _elementToRequiredColumnsMap.put(elementName, requiredColumnsMap);
              }
              extractAttributes(classDesc, mappedTable, columnsMap, requiredColumnsMap);
              extractIndirectionTables(model, schema, classDesc);
          }
      }
  
      private void extractAttributes(ClassDescriptor classDesc, Table mappedTable, TreeMap columnsMap, HashMap requiredColumnsMap)
      {
          FieldDescriptor[] fieldDescs = classDesc.getFieldDescriptions();
  
          if (fieldDescs != null)
          {
              for (int idx = 0; idx < fieldDescs.length; idx++)
              {
                  Column column = mappedTable.findColumn(fieldDescs[idx].getColumnName());
  
                  if (column != null)
                  {
                      String shortAttrName = getShortAttributeName(fieldDescs[idx].getAttributeName());
      
                      columnsMap.put(shortAttrName, column);
                      requiredColumnsMap.put(shortAttrName,
                                             fieldDescs[idx].isPrimaryKey() ? Boolean.TRUE : Boolean.FALSE);
                  }
              }
          }
      }
  
      /**
       * Extracts indirection tables from the given class descriptor, and adds elements
       * for them.
       * 
        * @param model     The repository model
        * @param schema    The database schema
        * @param classDesc The class descriptor
       */
      private void extractIndirectionTables(DescriptorRepository model, Database schema, ClassDescriptor classDesc)
      {
          String elementName = getElementName(classDesc.getClassNameOfObject());
  
          for (Iterator it = classDesc.getCollectionDescriptors().iterator(); it.hasNext();)
          {
              CollectionDescriptor collDesc  = (CollectionDescriptor)it.next();
              String               tableName = collDesc.getIndirectionTable();
  
              if ((tableName != null) &&
                  (tableName.length() > 0))
              {
                  // TODO: determine either top-most type combination or shortest name combination for
                  //       this indirection table
                  String          indirectionElementName = (String)_indirectionTableToElementName.get(tableName);
                  ClassDescriptor otherClassDesc         = model.getDescriptorFor(collDesc.getItemClassName());
                  String          otherElementName       = getElementName(otherClassDesc.getClassNameOfObject());
  
                  if (indirectionElementName == null)
                  {
                      if (elementName.compareTo(otherElementName) > 0)
                      {
                          // searching for collection at other end
                          for (Iterator otherCollIt = otherClassDesc.getCollectionDescriptors().iterator(); otherCollIt.hasNext();)
                          {
                              CollectionDescriptor otherCollDesc = (CollectionDescriptor)otherCollIt.next();
  
                              if (tableName.equals(otherCollDesc.getIndirectionTable()))
                              {
                                  indirectionElementName = otherElementName + "-" + otherCollDesc.getAttributeName() + "-" + elementName;
                                  break;
                              }
                          }
                      }
                      if (indirectionElementName == null)
                      {
                          indirectionElementName = elementName + "-" + collDesc.getAttributeName() + "-" + otherElementName;
                      }
                      _indirectionTableToElementName.put(collDesc.getIndirectionTable(), indirectionElementName);
                  }
  
                  Table   mappedTable        = (Table)_elementToTable.get(indirectionElementName);
                  TreeMap columnsMap         = (TreeMap)_elementToColumnMap.get(indirectionElementName);
                  HashMap requiredColumnsMap = (HashMap)_elementToRequiredColumnsMap.get(indirectionElementName);
  
                  if (mappedTable == null)
                  {
                      mappedTable = schema.findTable(collDesc.getIndirectionTable());
                      if (mappedTable == null)
                      {
                          continue;
                      }
                      columnsMap         = new TreeMap();
                      requiredColumnsMap = new HashMap();
                      _elementToTable.put(indirectionElementName, mappedTable);
                      _elementToColumnMap.put(indirectionElementName, columnsMap);
                      _elementToRequiredColumnsMap.put(indirectionElementName, requiredColumnsMap);
                  }
                  extractIndirectionKeys(classDesc,      elementName,      mappedTable, columnsMap, requiredColumnsMap);
                  extractIndirectionKeys(otherClassDesc, otherElementName, mappedTable, columnsMap, requiredColumnsMap);
              }
          }
      }
  
      private void extractIndirectionKeys(ClassDescriptor classDesc, String elementName, Table mappedTable, TreeMap columnsMap, HashMap requiredColumnsMap)
      {
          FieldDescriptor[] fieldDescs = classDesc.getFieldDescriptions();
  
          if (fieldDescs != null)
          {
              for (int idx = 0; idx < fieldDescs.length; idx++)
              {
                  if (!fieldDescs[idx].isPrimaryKey())
                  {
                      continue;
                  }
  
                  Column column = mappedTable.findColumn(fieldDescs[idx].getColumnName());
  
                  if (column != null)
                  {
                      String shortAttrName = elementName + "-" + getShortAttributeName(fieldDescs[idx].getAttributeName());
  
                      columnsMap.put(shortAttrName, column);
                      requiredColumnsMap.put(shortAttrName, Boolean.TRUE);
                  }
              }
          }
      }
  
      /**
        * Returns the short (unqualified) class name to be used as the element name.
       * 
       * @param className The (qualified) class name
       * @return The short class name
       */
      private String getElementName(String className)
      {
          String elementName = className.replace('$', '.');
  
          elementName = elementName.substring(elementName.lastIndexOf('.') + 1);
          return elementName;
      }
  
      /**
        * Returns the local attribute name (the part after the last '::' for nested fields).
       * 
       * @param attrName The original attribute name
       * @return The local attribute name
       */
      private String getShortAttributeName(String attrName)
      {
          return attrName.substring(attrName.lastIndexOf(':') + 1);
      }
  }
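
   A tiny illustration (with hypothetical input strings) of the naming scheme PreparedModel uses
   for the generated DTD: element names are the unqualified class names (with '$' of inner classes
   treated like '.'), and attribute names are the last segment of a possibly nested field path:

       public class ElementNamingSketch
       {
           // mirrors PreparedModel.getElementName()
           static String elementName(String className)
           {
               String name = className.replace('$', '.');

               return name.substring(name.lastIndexOf('.') + 1);
           }

           // mirrors PreparedModel.getShortAttributeName()
           static String shortAttributeName(String attrName)
           {
               return attrName.substring(attrName.lastIndexOf(':') + 1);
           }

           public static void main(String[] args)
           {
               System.out.println(elementName("org.apache.ojb.broker.ProductGroup$Detail")); // prints "Detail"
               System.out.println(shortAttributeName("parentGroup::groupId"));               // prints "groupId"
           }
       }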
  
  
  
  1.4       +184 -183  db-ojb/lib/commons-sql-1.0-dev.jar
  
  	<<Binary file>>
  
  
  1.146     +7 -3      db-ojb/build.xml
  
  Index: build.xml
  ===================================================================
  RCS file: /home/cvs/db-ojb/build.xml,v
  retrieving revision 1.145
  retrieving revision 1.146
  diff -u -r1.145 -r1.146
  --- build.xml	15 Jul 2004 21:54:04 -0000	1.145
  +++ build.xml	22 Jul 2004 07:48:53 -0000	1.146
  @@ -435,9 +435,13 @@
           <dbhandling workdir="${build.test}/ojb">
               <fileset dir="${build.test}" includes="*schema.xml"/>
               <!-- We're dumping the sql simply for testing purposes -->
  -            <createSql destFile="${build.test}/ojb/test.sql"/>
  +            <createSql destFile="${build.test}/test.sql"/>
               <!-- This actually creates the database -->
  -            <create/>
  +            <!-- <create/> -->
  +            <createDataDTD destFile="${build.test}/data.dtd"/>
  +            <insertDataSql>
  +                <fileset file="${build.test}/ojbtest-data.xml"/>
  +            </insertDataSql>
           </dbhandling>
   
       </target>
  
  
  
