cayenne-commits mailing list archives

From aadamc...@apache.org
Subject [08/17] cayenne git commit: CAY-2026 Java 7
Date Sat, 12 Sep 2015 10:02:36 GMT
http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/h2/H2PkGenerator.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/h2/H2PkGenerator.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/h2/H2PkGenerator.java
index a6264aa..a7d7788 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/dba/h2/H2PkGenerator.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/h2/H2PkGenerator.java
@@ -41,83 +41,68 @@ import org.apache.cayenne.map.DbKeyGenerator;
  */
 public class H2PkGenerator extends OraclePkGenerator {
 
-    protected H2PkGenerator(JdbcAdapter adapter) {
-        super(adapter);
-    }
-
-    @Override
-    protected String createSequenceString(DbEntity ent) {
-        return "CREATE SEQUENCE " + sequenceName(ent) + " START WITH " + pkStartValue
-                + " INCREMENT BY " + pkCacheSize(ent) + " CACHE 1";
-    }
-
-    @Override
-    protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception {
-
-        DbKeyGenerator pkGenerator = entity.getPrimaryKeyGenerator();
-        String pkGeneratingSequenceName;
-        if (pkGenerator != null && DbKeyGenerator.ORACLE_TYPE.equals(pkGenerator.getGeneratorType())
-                && pkGenerator.getGeneratorName() != null) {
-            pkGeneratingSequenceName = pkGenerator.getGeneratorName();
-        } else {
-            pkGeneratingSequenceName = sequenceName(entity);
-        }
-
-        Connection con = node.getDataSource().getConnection();
-        try {
-            Statement st = con.createStatement();
-            try {
-                String sql = "SELECT NEXT VALUE FOR " + pkGeneratingSequenceName;
-                adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
-                ResultSet rs = st.executeQuery(sql);
-                try {
-                    // Object pk = null;
-                    if (!rs.next()) {
-                        throw new CayenneRuntimeException("Error generating pk for DbEntity " + entity.getName());
-                    }
-                    return rs.getLong(1);
-                } finally {
-                    rs.close();
-                }
-            } finally {
-                st.close();
-            }
-        } finally {
-            con.close();
-        }
-
-    }
-
-    /**
-     * Fetches a list of existing sequences that might match Cayenne generated
-     * ones.
-     */
-    @Override
-    protected List<String> getExistingSequences(DataNode node) throws SQLException {
-
-        // check existing sequences
-        Connection con = node.getDataSource().getConnection();
-
-        try {
-            Statement sel = con.createStatement();
-            try {
-                String sql = "SELECT LOWER(sequence_name) FROM Information_Schema.Sequences";
-                adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
-                ResultSet rs = sel.executeQuery(sql);
-                try {
-                    List<String> sequenceList = new ArrayList<String>();
-                    while (rs.next()) {
-                        sequenceList.add(rs.getString(1));
-                    }
-                    return sequenceList;
-                } finally {
-                    rs.close();
-                }
-            } finally {
-                sel.close();
-            }
-        } finally {
-            con.close();
-        }
-    }
+	protected H2PkGenerator(JdbcAdapter adapter) {
+		super(adapter);
+	}
+
+	@Override
+	protected String createSequenceString(DbEntity ent) {
+		return "CREATE SEQUENCE " + sequenceName(ent) + " START WITH " + pkStartValue + " INCREMENT BY "
+				+ pkCacheSize(ent) + " CACHE 1";
+	}
+
+	@Override
+	protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception {
+
+		DbKeyGenerator pkGenerator = entity.getPrimaryKeyGenerator();
+		String pkGeneratingSequenceName;
+		if (pkGenerator != null && DbKeyGenerator.ORACLE_TYPE.equals(pkGenerator.getGeneratorType())
+				&& pkGenerator.getGeneratorName() != null) {
+			pkGeneratingSequenceName = pkGenerator.getGeneratorName();
+		} else {
+			pkGeneratingSequenceName = sequenceName(entity);
+		}
+
+		try (Connection con = node.getDataSource().getConnection();) {
+
+			try (Statement st = con.createStatement();) {
+				String sql = "SELECT NEXT VALUE FOR " + pkGeneratingSequenceName;
+				adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
+
+				try (ResultSet rs = st.executeQuery(sql);) {
+					// Object pk = null;
+					if (!rs.next()) {
+						throw new CayenneRuntimeException("Error generating pk for DbEntity " + entity.getName());
+					}
+					return rs.getLong(1);
+				}
+			}
+		}
+	}
+
+	/**
+	 * Fetches a list of existing sequences that might match Cayenne generated
+	 * ones.
+	 */
+	@Override
+	protected List<String> getExistingSequences(DataNode node) throws SQLException {
+
+		// check existing sequences
+
+		try (Connection con = node.getDataSource().getConnection();) {
+
+			try (Statement sel = con.createStatement();) {
+				String sql = "SELECT LOWER(sequence_name) FROM Information_Schema.Sequences";
+				adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
+
+				try (ResultSet rs = sel.executeQuery(sql);) {
+					List<String> sequenceList = new ArrayList<String>();
+					while (rs.next()) {
+						sequenceList.add(rs.getString(1));
+					}
+					return sequenceList;
+				}
+			}
+		}
+	}
 }
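
The change above is representative of the whole commit: the three nested try/finally blocks that closed the ResultSet, Statement and Connection by hand are replaced with Java 7 try-with-resources, which closes each resource automatically, in reverse order of declaration, even when an exception is thrown. A minimal, self-contained sketch of the same idiom (the DataSource and sequence name are illustrative, not taken from the commit):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import javax.sql.DataSource;

public class SequenceNextValueExample {

    // Reads the next sequence value using the Java 7 try-with-resources form
    // this commit switches to: each resource is closed automatically, in
    // reverse order of declaration, even if an exception is thrown.
    static long nextValue(DataSource dataSource, String sequenceName) throws SQLException {
        String sql = "SELECT NEXT VALUE FOR " + sequenceName;

        try (Connection con = dataSource.getConnection();
                Statement st = con.createStatement();
                ResultSet rs = st.executeQuery(sql)) {

            if (!rs.next()) {
                throw new SQLException("No value returned for sequence " + sequenceName);
            }
            return rs.getLong(1);
        }
    }
}

A single try header can declare all three resources at once, as in this sketch; the committed code keeps three nested try blocks instead, which behaves the same way.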

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/ingres/IngresPkGenerator.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/ingres/IngresPkGenerator.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/ingres/IngresPkGenerator.java
index b53a648..91c8db2 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/dba/ingres/IngresPkGenerator.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/ingres/IngresPkGenerator.java
@@ -41,83 +41,61 @@ import org.apache.cayenne.map.DbKeyGenerator;
  */
 public class IngresPkGenerator extends OraclePkGenerator {
 
-    protected IngresPkGenerator(JdbcAdapter adapter) {
-        super(adapter);
-    }
+	protected IngresPkGenerator(JdbcAdapter adapter) {
+		super(adapter);
+	}
 
-    @Override
-    protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception {
+	@Override
+	protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception {
 
-        DbKeyGenerator pkGenerator = entity.getPrimaryKeyGenerator();
-        String pkGeneratingSequenceName;
-        if (pkGenerator != null
-                && DbKeyGenerator.ORACLE_TYPE.equals(pkGenerator.getGeneratorType())
-                && pkGenerator.getGeneratorName() != null) {
-            pkGeneratingSequenceName = pkGenerator.getGeneratorName();
-        } else {
-            pkGeneratingSequenceName = sequenceName(entity);
-        }
+		DbKeyGenerator pkGenerator = entity.getPrimaryKeyGenerator();
+		String pkGeneratingSequenceName;
+		if (pkGenerator != null && DbKeyGenerator.ORACLE_TYPE.equals(pkGenerator.getGeneratorType())
+				&& pkGenerator.getGeneratorName() != null) {
+			pkGeneratingSequenceName = pkGenerator.getGeneratorName();
+		} else {
+			pkGeneratingSequenceName = sequenceName(entity);
+		}
 
-        Connection con = node.getDataSource().getConnection();
-        try {
-            Statement st = con.createStatement();
-            try {
-                String sql = "SELECT " + pkGeneratingSequenceName + ".nextval";
-                adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
-                ResultSet rs = st.executeQuery(sql);
-                try {
-                    // Object pk = null;
-                    if (!rs.next()) {
-                        throw new CayenneRuntimeException(
-                                "Error generating pk for DbEntity " + entity.getName());
-                    }
-                    return rs.getLong(1);
-                }
-                finally {
-                    rs.close();
-                }
-            }
-            finally {
-                st.close();
-            }
-        }
-        finally {
-            con.close();
-        }
-    }
+		try (Connection con = node.getDataSource().getConnection();) {
 
-    @Override
-    protected List<String> getExistingSequences(DataNode node) throws SQLException {
+			try (Statement st = con.createStatement();) {
+				String sql = "SELECT " + pkGeneratingSequenceName + ".nextval";
+				adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
 
-        // check existing sequences
-        Connection connection = node.getDataSource().getConnection();
+				try (ResultSet rs = st.executeQuery(sql);) {
+					// Object pk = null;
+					if (!rs.next()) {
+						throw new CayenneRuntimeException("Error generating pk for DbEntity " + entity.getName());
+					}
+					return rs.getLong(1);
+				}
+			}
+		}
+	}
 
-        try {
-            Statement select = connection.createStatement();
-            try {
-                String sql = "select seq_name from iisequences where seq_owner != 'DBA'";
-                adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
-                ResultSet rs = select.executeQuery(sql);
-                try {
-                    List<String> sequenceList = new ArrayList<String>();
-                    while (rs.next()) {
-                        String name = rs.getString(1);
-                        if (name != null) {
-                            sequenceList.add(name.trim());
-                        }
-                    }
-                    return sequenceList;
-                }
-                finally {
-                    rs.close();
-                }
-            }
-            finally {
-                select.close();
-            }
-        }
-        finally {
-            connection.close();
-        }
-    }
+	@Override
+	protected List<String> getExistingSequences(DataNode node) throws SQLException {
+
+		// check existing sequences
+
+		try (Connection connection = node.getDataSource().getConnection();) {
+
+			try (Statement select = connection.createStatement();) {
+				String sql = "select seq_name from iisequences where seq_owner != 'DBA'";
+				adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
+
+				try (ResultSet rs = select.executeQuery(sql);) {
+					List<String> sequenceList = new ArrayList<String>();
+					while (rs.next()) {
+						String name = rs.getString(1);
+						if (name != null) {
+							sequenceList.add(name.trim());
+						}
+					}
+					return sequenceList;
+				}
+			}
+		}
+	}
 }
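
One behavioral difference from the hand-written finally blocks is worth noting: if both the body and a close() fail, try-with-resources keeps the body's exception as the primary one and attaches the close() failure as a suppressed exception, whereas a finally block that rethrows from close() would replace it. A small standalone demonstration (not part of the commit):

import java.sql.SQLException;

public class SuppressedExceptionExample {

    // A resource whose close() always fails, used to show how close-time
    // failures become suppressed exceptions instead of masking the original.
    static class FailingResource implements AutoCloseable {
        @Override
        public void close() throws SQLException {
            throw new SQLException("close failed");
        }
    }

    public static void main(String[] args) {
        try {
            try (FailingResource r = new FailingResource()) {
                throw new SQLException("body failed");
            }
        } catch (SQLException e) {
            System.out.println("primary: " + e.getMessage());       // prints "body failed"
            for (Throwable suppressed : e.getSuppressed()) {
                System.out.println("suppressed: " + suppressed.getMessage()); // prints "close failed"
            }
        }
    }
}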

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLPkGenerator.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLPkGenerator.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLPkGenerator.java
index 7ad240a..180a4c2 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLPkGenerator.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLPkGenerator.java
@@ -34,171 +34,149 @@ import org.apache.cayenne.map.DbEntity;
  */
 public class MySQLPkGenerator extends JdbcPkGenerator {
 
-    MySQLPkGenerator(JdbcAdapter adapter) {
-        super(adapter);
-    }
-
-    @Override
-    protected String dropAutoPkString() {
-        return "DROP TABLE IF EXISTS AUTO_PK_SUPPORT";
-    }
-
-    /**
-     * Overrides superclass's implementation to perform locking of the primary key lookup
-     * table.
-     * 
-     * @since 3.0
-     */
-    @Override
-    protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception {
-
-        // must work directly with JDBC connection, since we
-        // must unlock the AUTO_PK_SUPPORT table in case of
-        // failures.... ah..JDBC is fun...
-
-        // chained SQL exception
-        SQLException exception = null;
-        long pk = -1l;
-
-        Connection con = node.getDataSource().getConnection();
-        try {
-
-            if (con.getAutoCommit()) {
-                con.setAutoCommit(false);
-            }
-
-            Statement st = con.createStatement();
-
-            try {
-                pk = getLongPrimaryKey(st, entity.getName());
-                con.commit();
-            }
-            catch (SQLException pkEx) {
-
-                try {
-                    con.rollback();
-                }
-                catch (SQLException e) {
-
-                }
-
-                exception = processSQLException(pkEx, exception);
-            }
-            finally {
-                // UNLOCK!
-                // THIS MUST BE EXECUTED NO MATTER WHAT, OR WE WILL LOCK THE PRIMARY KEY
-                // TABLE!!
-                try {
-                    String unlockString = "UNLOCK TABLES";
-                    adapter.getJdbcEventLogger().logQuery(unlockString, Collections.EMPTY_LIST);
-                    st.execute(unlockString);
-                }
-                catch (SQLException unlockEx) {
-                    exception = processSQLException(unlockEx, exception);
-                }
-                finally {
-                    // close statement
-                    try {
-                        st.close();
-                    }
-                    catch (SQLException stClosingEx) {
-                        // ignoring...
-                    }
-                }
-            }
-        }
-        catch (SQLException otherEx) {
-            exception = processSQLException(otherEx, exception);
-        }
-        finally {
-            try {
-                con.close();
-            }
-            catch (SQLException closingEx) {
-                // ignoring
-            }
-        }
-
-        // check errors
-        if (exception != null) {
-            throw exception;
-        }
-
-        return pk;
-
-    }
-
-    /**
-     * Appends a new SQLException to the chain. If parent is null, uses the exception as
-     * the chain root.
-     */
-    protected SQLException processSQLException(SQLException exception, SQLException parent) {
-        if (parent == null) {
-            return exception;
-        }
-
-        parent.setNextException(exception);
-        return parent;
-    }
-
-    @Override
-    protected String pkTableCreateString() {
-        StringBuilder buf = new StringBuilder();
-        buf.append("CREATE TABLE IF NOT EXISTS AUTO_PK_SUPPORT (").append(
-                "  TABLE_NAME CHAR(100) NOT NULL,").append(
-                "  NEXT_ID BIGINT NOT NULL, UNIQUE (TABLE_NAME)").append(")");
-
-        return buf.toString();
-    }
-
-    /**
-     * @since 3.0
-     */
-    protected long getLongPrimaryKey(Statement statement, String entityName)
-            throws SQLException {
-        // lock
-        String lockString = "LOCK TABLES AUTO_PK_SUPPORT WRITE";
-        adapter.getJdbcEventLogger().logQuery(lockString, Collections.EMPTY_LIST);
-        statement.execute(lockString);
-
-        // select
-
-        String selectString = super.pkSelectString(entityName);
-        adapter.getJdbcEventLogger().logQuery(selectString, Collections.EMPTY_LIST);
-        ResultSet rs = statement.executeQuery(selectString);
-        long pk = -1;
-        try {
-            if (!rs.next()) {
-                throw new SQLException("No rows for '" + entityName + "'");
-            }
-
-            pk = rs.getLong(1);
-
-            if (rs.next()) {
-                throw new SQLException("More than one row for '" + entityName + "'");
-            }
-        }
-        finally {
-            try {
-                rs.close();
-            }
-            catch (Exception ex) {
-                // ignoring...
-            }
-        }
-
-        // update
-        String updateString = super.pkUpdateString(entityName) + " AND NEXT_ID = " + pk;
-        adapter.getJdbcEventLogger().logQuery(updateString, Collections.EMPTY_LIST);
-        int updated = statement.executeUpdate(updateString);
-        // optimistic lock failure...
-        if (updated != 1) {
-            throw new SQLException("Error updating PK count '"
-                    + entityName
-                    + "': "
-                    + updated);
-        }
-
-        return pk;
-    }
+	MySQLPkGenerator(JdbcAdapter adapter) {
+		super(adapter);
+	}
+
+	@Override
+	protected String dropAutoPkString() {
+		return "DROP TABLE IF EXISTS AUTO_PK_SUPPORT";
+	}
+
+	/**
+	 * Overrides superclass's implementation to perform locking of the primary
+	 * key lookup table.
+	 * 
+	 * @since 3.0
+	 */
+	@Override
+	protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception {
+
+		// must work directly with JDBC connection, since we
+		// must unlock the AUTO_PK_SUPPORT table in case of
+		// failures.... ah..JDBC is fun...
+
+		// chained SQL exception
+		SQLException exception = null;
+		long pk = -1l;
+
+		try (Connection con = node.getDataSource().getConnection();) {
+
+			if (con.getAutoCommit()) {
+				con.setAutoCommit(false);
+			}
+
+			Statement st = con.createStatement();
+
+			try {
+				pk = getLongPrimaryKey(st, entity.getName());
+				con.commit();
+			} catch (SQLException pkEx) {
+
+				try {
+					con.rollback();
+				} catch (SQLException e) {
+
+				}
+
+				exception = processSQLException(pkEx, exception);
+			} finally {
+				// UNLOCK!
+				// THIS MUST BE EXECUTED NO MATTER WHAT, OR WE WILL LOCK THE
+				// PRIMARY KEY
+				// TABLE!!
+				try {
+					String unlockString = "UNLOCK TABLES";
+					adapter.getJdbcEventLogger().logQuery(unlockString, Collections.EMPTY_LIST);
+					st.execute(unlockString);
+				} catch (SQLException unlockEx) {
+					exception = processSQLException(unlockEx, exception);
+				} finally {
+					// close statement
+					try {
+						st.close();
+					} catch (SQLException stClosingEx) {
+						// ignoring...
+					}
+				}
+			}
+		} catch (SQLException otherEx) {
+			exception = processSQLException(otherEx, exception);
+		}
+
+		// check errors
+		if (exception != null) {
+			throw exception;
+		}
+
+		return pk;
+
+	}
+
+	/**
+	 * Appends a new SQLException to the chain. If parent is null, uses the
+	 * exception as the chain root.
+	 */
+	protected SQLException processSQLException(SQLException exception, SQLException parent) {
+		if (parent == null) {
+			return exception;
+		}
+
+		parent.setNextException(exception);
+		return parent;
+	}
+
+	@Override
+	protected String pkTableCreateString() {
+		StringBuilder buf = new StringBuilder();
+		buf.append("CREATE TABLE IF NOT EXISTS AUTO_PK_SUPPORT (").append("  TABLE_NAME CHAR(100) NOT NULL,")
+				.append("  NEXT_ID BIGINT NOT NULL, UNIQUE (TABLE_NAME)").append(")");
+
+		return buf.toString();
+	}
+
+	/**
+	 * @since 3.0
+	 */
+	protected long getLongPrimaryKey(Statement statement, String entityName) throws SQLException {
+		// lock
+		String lockString = "LOCK TABLES AUTO_PK_SUPPORT WRITE";
+		adapter.getJdbcEventLogger().logQuery(lockString, Collections.EMPTY_LIST);
+		statement.execute(lockString);
+
+		// select
+
+		String selectString = super.pkSelectString(entityName);
+		adapter.getJdbcEventLogger().logQuery(selectString, Collections.EMPTY_LIST);
+		ResultSet rs = statement.executeQuery(selectString);
+		long pk = -1;
+		try {
+			if (!rs.next()) {
+				throw new SQLException("No rows for '" + entityName + "'");
+			}
+
+			pk = rs.getLong(1);
+
+			if (rs.next()) {
+				throw new SQLException("More than one row for '" + entityName + "'");
+			}
+		} finally {
+			try {
+				rs.close();
+			} catch (Exception ex) {
+				// ignoring...
+			}
+		}
+
+		// update
+		String updateString = super.pkUpdateString(entityName) + " AND NEXT_ID = " + pk;
+		adapter.getJdbcEventLogger().logQuery(updateString, Collections.EMPTY_LIST);
+		int updated = statement.executeUpdate(updateString);
+		// optimistic lock failure...
+		if (updated != 1) {
+			throw new SQLException("Error updating PK count '" + entityName + "': " + updated);
+		}
+
+		return pk;
+	}
 }
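
Here the Statement deliberately stays outside try-with-resources: UNLOCK TABLES must be issued on that same statement before it is closed, and any failures along the way are folded into a single SQLException chain by processSQLException(). A standalone sketch of that chaining (the messages are made up):

import java.sql.SQLException;

public class SqlExceptionChainExample {

    // Same contract as processSQLException() above: the first exception is the
    // chain root, later ones are appended with setNextException().
    static SQLException chain(SQLException exception, SQLException parent) {
        if (parent == null) {
            return exception;
        }
        parent.setNextException(exception);
        return parent;
    }

    public static void main(String[] args) {
        SQLException chained = null;
        chained = chain(new SQLException("pk lookup failed"), chained);
        chained = chain(new SQLException("UNLOCK TABLES failed"), chained);

        // Walk the chain: prints the root first, then the appended exception.
        for (SQLException e = chained; e != null; e = e.getNextException()) {
            System.out.println(e.getMessage());
        }
    }
}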

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLProcedureAction.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLProcedureAction.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLProcedureAction.java
index 5c5b199..ac189ab 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLProcedureAction.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLProcedureAction.java
@@ -35,115 +35,102 @@ import org.apache.cayenne.query.ProcedureQuery;
  */
 class MySQLProcedureAction extends ProcedureAction {
 
-    public MySQLProcedureAction(ProcedureQuery query, DataNode dataNode) {
-        super(query, dataNode);
-    }
-
-    @Override
-    public void performAction(Connection connection, OperationObserver observer)
-            throws SQLException, Exception {
-
-        processedResultSets = 0;
-
-        ProcedureTranslator transl = createTranslator(connection);
-        CallableStatement statement = (CallableStatement) transl.createStatement();
-
-        try {
-
-            // this is one difference with super - we need to read the first result set
-            // without calling 'getMoreResults' - which may actually be a good default
-            // strategy?
-            boolean firstResult = statement.execute();
-
-            // read out parameters
-            readProcedureOutParameters(statement, observer);
-
-            // read first result
-            if (firstResult) {
-                processResultSet(statement, observer);
-            }
-            else if (!processUpdate(statement, observer)) {
-                return;
-            }
-
-            // read the rest of the query
-            while (true) {
-                if (statement.getMoreResults()) {
-                    processResultSet(statement, observer);
-                }
-                else if (!processUpdate(statement, observer)) {
-                    break;
-                }
-            }
-        }
-        finally {
-            try {
-                statement.close();
-            }
-            catch (SQLException ex) {
-
-            }
-        }
-    }
-
-    private void processResultSet(CallableStatement statement, OperationObserver observer)
-            throws Exception {
-        ResultSet rs = statement.getResultSet();
-
-        try {
-            RowDescriptor descriptor = describeResultSet(rs, processedResultSets++);
-            readResultSet(rs, descriptor, query, observer);
-        }
-        finally {
-            try {
-                rs.close();
-            }
-            catch (SQLException ex) {
-            }
-        }
-    }
-
-    private boolean processUpdate(CallableStatement statement, OperationObserver observer)
-            throws Exception {
-        int updateCount = statement.getUpdateCount();
-        if (updateCount == -1) {
-            return false;
-        }
-        dataNode.getJdbcEventLogger().logUpdateCount(updateCount);
-        observer.nextCount(query, updateCount);
-
-        return true;
-    }
-
-    /**
-     * Creates a translator that adds parenthesis to no-param queries.
-     */
-    // see CAY-750 for the problem description
-    @Override
-    protected ProcedureTranslator createTranslator(Connection connection) {
-        ProcedureTranslator translator = new MySQLProcedureTranslator();
-        translator.setAdapter(dataNode.getAdapter());
-        translator.setQuery(query);
-        translator.setEntityResolver(dataNode.getEntityResolver());
-        translator.setConnection(connection);
-        translator.setJdbcEventLogger(dataNode.getJdbcEventLogger());
-        return translator;
-    }
-
-    // same as postgres translator - should we make this the default?
-    static class MySQLProcedureTranslator extends ProcedureTranslator {
-
-        @Override
-        protected String createSqlString() {
-
-            String sql = super.createSqlString();
-
-            // add empty parameter parenthesis
-            if (sql.endsWith("}") && !sql.endsWith(")}")) {
-                sql = sql.substring(0, sql.length() - 1) + "()}";
-            }
-
-            return sql;
-        }
-    }
+	public MySQLProcedureAction(ProcedureQuery query, DataNode dataNode) {
+		super(query, dataNode);
+	}
+
+	@Override
+	public void performAction(Connection connection, OperationObserver observer) throws SQLException, Exception {
+
+		processedResultSets = 0;
+
+		ProcedureTranslator transl = createTranslator(connection);
+
+		try (CallableStatement statement = (CallableStatement) transl.createStatement();) {
+
+			// this is one difference with super - we need to read the first
+			// result set
+			// without calling 'getMoreResults' - which may actually be a good
+			// default
+			// strategy?
+			boolean firstResult = statement.execute();
+
+			// read out parameters
+			readProcedureOutParameters(statement, observer);
+
+			// read first result
+			if (firstResult) {
+				processResultSet(statement, observer);
+			} else if (!processUpdate(statement, observer)) {
+				return;
+			}
+
+			// read the rest of the query
+			while (true) {
+				if (statement.getMoreResults()) {
+					processResultSet(statement, observer);
+				} else if (!processUpdate(statement, observer)) {
+					break;
+				}
+			}
+		}
+
+	}
+
+	private void processResultSet(CallableStatement statement, OperationObserver observer) throws Exception {
+		ResultSet rs = statement.getResultSet();
+
+		try {
+			RowDescriptor descriptor = describeResultSet(rs, processedResultSets++);
+			readResultSet(rs, descriptor, query, observer);
+		} finally {
+			try {
+				rs.close();
+			} catch (SQLException ex) {
+			}
+		}
+	}
+
+	private boolean processUpdate(CallableStatement statement, OperationObserver observer) throws Exception {
+		int updateCount = statement.getUpdateCount();
+		if (updateCount == -1) {
+			return false;
+		}
+		dataNode.getJdbcEventLogger().logUpdateCount(updateCount);
+		observer.nextCount(query, updateCount);
+
+		return true;
+	}
+
+	/**
+	 * Creates a translator that adds parenthesis to no-param queries.
+	 */
+	// see CAY-750 for the problem description
+	@Override
+	protected ProcedureTranslator createTranslator(Connection connection) {
+		ProcedureTranslator translator = new MySQLProcedureTranslator();
+		translator.setAdapter(dataNode.getAdapter());
+		translator.setQuery(query);
+		translator.setEntityResolver(dataNode.getEntityResolver());
+		translator.setConnection(connection);
+		translator.setJdbcEventLogger(dataNode.getJdbcEventLogger());
+		return translator;
+	}
+
+	// same as postgres translator - should we make this the default?
+	static class MySQLProcedureTranslator extends ProcedureTranslator {
+
+		@Override
+		protected String createSqlString() {
+
+			String sql = super.createSqlString();
+
+			// add empty parameter parenthesis
+			if (sql.endsWith("}") && !sql.endsWith(")}")) {
+				sql = sql.substring(0, sql.length() - 1) + "()}";
+			}
+
+			return sql;
+		}
+	}
 }
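
The loop above drains every result produced by the stored procedure by alternating between getResultSet() and getUpdateCount() until getMoreResults() returns false and the update count is exhausted. A self-contained sketch of the same JDBC pattern (the procedure name is illustrative):

import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;

public class MultipleResultsExample {

    // Drains every result set and update count returned by a stored procedure,
    // using the same execute()/getMoreResults()/getUpdateCount() loop as
    // MySQLProcedureAction above.
    static void drain(Connection connection) throws SQLException {
        try (CallableStatement statement = connection.prepareCall("{call my_proc()}")) {

            boolean isResultSet = statement.execute();

            while (true) {
                if (isResultSet) {
                    try (ResultSet rs = statement.getResultSet()) {
                        while (rs.next()) {
                            // read the row...
                        }
                    }
                } else {
                    int updateCount = statement.getUpdateCount();
                    if (updateCount == -1) {
                        break; // no more results
                    }
                    // record the update count...
                }
                isResultSet = statement.getMoreResults();
            }
        }
    }
}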

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLSniffer.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLSniffer.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLSniffer.java
index a7c7457..2917fa2 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLSniffer.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/mysql/MySQLSniffer.java
@@ -36,52 +36,46 @@ import org.apache.cayenne.di.Inject;
  */
 public class MySQLSniffer implements DbAdapterDetector {
 
-    protected AdhocObjectFactory objectFactory;
+	protected AdhocObjectFactory objectFactory;
 
-    public MySQLSniffer(@Inject AdhocObjectFactory objectFactory) {
-        this.objectFactory = objectFactory;
-    }
+	public MySQLSniffer(@Inject AdhocObjectFactory objectFactory) {
+		this.objectFactory = objectFactory;
+	}
 
-    @Override
-    public DbAdapter createAdapter(DatabaseMetaData md) throws SQLException {
-        String dbName = md.getDatabaseProductName();
-        if (dbName == null || !dbName.toUpperCase().contains("MYSQL")) {
-            return null;
-        }
+	@Override
+	public DbAdapter createAdapter(DatabaseMetaData md) throws SQLException {
+		String dbName = md.getDatabaseProductName();
+		if (dbName == null || !dbName.toUpperCase().contains("MYSQL")) {
+			return null;
+		}
 
-        // if InnoDB is used as a default engine, allow PK
-        Statement statement = md.getConnection().createStatement();
-        boolean supportFK = false;
-        String adapterStorageEngine = MySQLAdapter.DEFAULT_STORAGE_ENGINE;
+		// if InnoDB is used as a default engine, allow PK
 
-        try {
-            // http://dev.mysql.com/doc/refman/5.0/en/storage-engines.html
-            // per link above "table type" concept is deprecated in favor of "storage
-            // engine". Not sure if we should check "storage_engine" variable and in what
-            // version of MySQL it got introduced...
-            ResultSet rs = statement.executeQuery("SHOW VARIABLES LIKE 'table_type'");
-            try {
-                if (rs.next()) {
-                    String storageEngine = rs.getString(2);
-                    if (storageEngine != null) {
-                        adapterStorageEngine = storageEngine;
-                        supportFK = storageEngine.toUpperCase().equals("INNODB");
-                    }
-                }
-            }
-            finally {
-                rs.close();
-            }
-        }
-        finally {
-            statement.close();
-        }
+		boolean supportFK = false;
+		String adapterStorageEngine = MySQLAdapter.DEFAULT_STORAGE_ENGINE;
 
-        MySQLAdapter adapter = objectFactory.newInstance(
-                MySQLAdapter.class,
-                MySQLAdapter.class.getName());
-        adapter.setSupportsFkConstraints(supportFK);
-        adapter.setStorageEngine(adapterStorageEngine);
-        return adapter;
-    }
+		try (Statement statement = md.getConnection().createStatement();) {
+			// http://dev.mysql.com/doc/refman/5.0/en/storage-engines.html
+			// per link above "table type" concept is deprecated in favor of
+			// "storage
+			// engine". Not sure if we should check "storage_engine" variable
+			// and in what
+			// version of MySQL it got introduced...
+
+			try (ResultSet rs = statement.executeQuery("SHOW VARIABLES LIKE 'table_type'");) {
+				if (rs.next()) {
+					String storageEngine = rs.getString(2);
+					if (storageEngine != null) {
+						adapterStorageEngine = storageEngine;
+						supportFK = storageEngine.toUpperCase().equals("INNODB");
+					}
+				}
+			}
+		}
+
+		MySQLAdapter adapter = objectFactory.newInstance(MySQLAdapter.class, MySQLAdapter.class.getName());
+		adapter.setSupportsFkConstraints(supportFK);
+		adapter.setStorageEngine(adapterStorageEngine);
+		return adapter;
+	}
 }
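
Note that only the Statement and ResultSet are managed by try-with-resources here; the Connection comes from DatabaseMetaData.getConnection() and belongs to the caller, so the sniffer must not close it. A standalone sketch of the same check (the helper name is made up; the InnoDB comparison mirrors the code above):

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class StorageEngineCheckExample {

    // Checks whether the default MySQL storage engine is InnoDB, in the spirit
    // of MySQLSniffer above. The Connection is owned by whoever produced the
    // DatabaseMetaData, so it is deliberately not closed here.
    static boolean defaultEngineIsInnoDb(DatabaseMetaData md) throws SQLException {
        Connection connection = md.getConnection();

        try (Statement statement = connection.createStatement();
                ResultSet rs = statement.executeQuery("SHOW VARIABLES LIKE 'table_type'")) {

            if (rs.next()) {
                String storageEngine = rs.getString(2);
                return storageEngine != null && storageEngine.equalsIgnoreCase("INNODB");
            }
            return false;
        }
    }
}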

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/openbase/OpenBasePkGenerator.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/openbase/OpenBasePkGenerator.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/openbase/OpenBasePkGenerator.java
index 3b9e9d7..bb3068f 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/dba/openbase/OpenBasePkGenerator.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/openbase/OpenBasePkGenerator.java
@@ -43,262 +43,245 @@ import org.apache.cayenne.util.IDUtil;
  */
 public class OpenBasePkGenerator extends JdbcPkGenerator {
 
-    protected OpenBasePkGenerator(JdbcAdapter adapter) {
-        super(adapter);
-    }
-
-    /**
-     * Returns a non-repeating primary key for a given PK attribute. Since
-     * OpenBase-specific mechanism is used, key caching is disabled. Instead a database
-     * operation is performed on every call.
+	protected OpenBasePkGenerator(JdbcAdapter adapter) {
+		super(adapter);
+	}
+
+	/**
+	 * Returns a non-repeating primary key for a given PK attribute. Since
+	 * OpenBase-specific mechanism is used, key caching is disabled. Instead a
+	 * database operation is performed on every call.
+	 * 
+	 * @since 3.0
+	 */
+	@Override
+	public Object generatePk(DataNode node, DbAttribute pk) throws Exception {
+
+		DbEntity entity = (DbEntity) pk.getEntity();
+
+		switch (pk.getType()) {
+		case Types.BINARY:
+		case Types.VARBINARY:
+			return IDUtil.pseudoUniqueSecureByteSequence(pk.getMaxLength());
+		}
+
+		long value = longPkFromDatabase(node, entity);
+
+		if (pk.getType() == Types.BIGINT) {
+			return Long.valueOf(value);
+		} else {
+			// leaving it up to the user to ensure that PK does not exceed max
+			// int...
+			return Integer.valueOf((int) value);
+		}
+	}
+
+	/**
+	 * Generates new (unique and non-repeating) primary key for specified
+	 * DbEntity. Executed SQL looks like this:
+	 * 
+	 * <pre>
+	 *  NEWID FOR Table Column
+	 * </pre>
+	 * 
+	 * COLUMN must be marked as UNIQUE in order for this to work properly.
+	 * 
+	 * @since 3.0
+	 */
+	@Override
+	protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception {
+
+		String sql = newIDString(entity);
+		adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
+
+		try (Connection con = node.getDataSource().getConnection();) {
+
+			try (Statement st = con.createStatement();) {
+
+				try (ResultSet rs = st.executeQuery(sql);) {
+					// Object pk = null;
+					if (!rs.next()) {
+						throw new CayenneRuntimeException("Error generating pk for DbEntity " + entity.getName());
+					}
+					return rs.getLong(1);
+				}
+			}
+		}
+	}
+
+	/**
+	 * Returns SQL string that can generate new (unique and non-repeating)
+	 * primary key for specified DbEntity. No actual database operations are
+	 * performed.
+	 * 
+	 * @since 1.2
+	 */
+	protected String newIDString(DbEntity ent) {
+		if (ent.getPrimaryKeys() == null || ent.getPrimaryKeys().size() != 1) {
+			throw new CayenneRuntimeException("Error generating pk for DbEntity " + ent.getName()
+					+ ": pk must be single attribute");
+		}
+		DbAttribute primaryKeyAttribute = ent.getPrimaryKeys().iterator().next();
+
+		return "NEWID FOR " + ent.getName() + ' ' + primaryKeyAttribute.getName();
+	}
+
+	@Override
+	public void createAutoPk(DataNode node, List dbEntities) throws Exception {
+		// looks like generating a PK on top of an existing one does not
+		// result in errors...
+
+		// create needed sequences
+		Iterator<?> it = dbEntities.iterator();
+		while (it.hasNext()) {
+			DbEntity entity = (DbEntity) it.next();
+
+			// the caller must take care of giving us the right entities
+			// but lets check anyway
+			if (!canCreatePK(entity)) {
+				continue;
+			}
+
+			runUpdate(node, createPKString(entity));
+			runUpdate(node, createUniquePKIndexString(entity));
+		}
+	}
+
+	/**
      * 
-     * @since 3.0
      */
-    @Override
-    public Object generatePk(DataNode node, DbAttribute pk) throws Exception {
-
-        DbEntity entity = (DbEntity) pk.getEntity();
-
-        switch (pk.getType()) {
-            case Types.BINARY:
-            case Types.VARBINARY:
-                return IDUtil.pseudoUniqueSecureByteSequence(pk.getMaxLength());
-        }
-
-        long value = longPkFromDatabase(node, entity);
-
-        if (pk.getType() == Types.BIGINT) {
-            return Long.valueOf(value);
-        }
-        else {
-            // leaving it up to the user to ensure that PK does not exceed max int...
-            return Integer.valueOf((int) value);
-        }
-    }
-
-    /**
-     * Generates new (unique and non-repeating) primary key for specified DbEntity.
-     * Executed SQL looks like this:
+	@Override
+	public List createAutoPkStatements(List dbEntities) {
+		List<String> list = new ArrayList<String>(2 * dbEntities.size());
+		Iterator<?> it = dbEntities.iterator();
+		while (it.hasNext()) {
+			DbEntity entity = (DbEntity) it.next();
+
+			// the caller must take care of giving us the right entities
+			// but lets check anyway
+			if (!canCreatePK(entity)) {
+				continue;
+			}
+
+			list.add(createPKString(entity));
+			list.add(createUniquePKIndexString(entity));
+		}
+
+		return list;
+	}
+
+	protected boolean canCreatePK(DbEntity entity) {
+		return entity.getPrimaryKeys().size() > 0;
+	}
+
+	/**
      * 
-     * <pre>
-     *  NEWID FOR Table Column
-     * </pre>
-     * 
-     * COLUMN must be marked as UNIQUE in order for this to work properly.
-     * 
-     * @since 3.0
-     */
-    @Override
-    protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception {
-
-        String sql = newIDString(entity);
-        adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
-
-        Connection con = node.getDataSource().getConnection();
-        try {
-            Statement st = con.createStatement();
-            try {
-
-                ResultSet rs = st.executeQuery(sql);
-                try {
-                    // Object pk = null;
-                    if (!rs.next()) {
-                        throw new CayenneRuntimeException(
-                                "Error generating pk for DbEntity " + entity.getName());
-                    }
-                    return rs.getLong(1);
-                }
-                finally {
-                    rs.close();
-                }
-            }
-            finally {
-                st.close();
-            }
-        }
-        finally {
-            con.close();
-        }
-
-    }
-
-    /**
-     * Returns SQL string that can generate new (unique and non-repeating) primary key for
-     * specified DbEntity. No actual database operations are performed.
-     * 
-     * @since 1.2
-     */
-    protected String newIDString(DbEntity ent) {
-        if (ent.getPrimaryKeys() == null || ent.getPrimaryKeys().size() != 1) {
-            throw new CayenneRuntimeException("Error generating pk for DbEntity "
-                    + ent.getName()
-                    + ": pk must be single attribute");
-        }
-        DbAttribute primaryKeyAttribute = ent.getPrimaryKeys().iterator().next();
-
-        return "NEWID FOR " + ent.getName() + ' ' + primaryKeyAttribute.getName();
-    }
-
-    @Override
-    public void createAutoPk(DataNode node, List dbEntities) throws Exception {
-        // looks like generating a PK on top of an existing one does not
-        // result in errors...
-
-        // create needed sequences
-        Iterator<?> it = dbEntities.iterator();
-        while (it.hasNext()) {
-            DbEntity entity = (DbEntity) it.next();
-
-            // the caller must take care of giving us the right entities
-            // but lets check anyway
-            if (!canCreatePK(entity)) {
-                continue;
-            }
-
-            runUpdate(node, createPKString(entity));
-            runUpdate(node, createUniquePKIndexString(entity));
-        }
-    }
-
-    /**
-     * 
-     */
-    @Override
-    public List createAutoPkStatements(List dbEntities) {
-        List<String> list = new ArrayList<String>(2 * dbEntities.size());
-        Iterator<?> it = dbEntities.iterator();
-        while (it.hasNext()) {
-            DbEntity entity = (DbEntity) it.next();
-
-            // the caller must take care of giving us the right entities
-            // but lets check anyway
-            if (!canCreatePK(entity)) {
-                continue;
-            }
-
-            list.add(createPKString(entity));
-            list.add(createUniquePKIndexString(entity));
-        }
-
-        return list;
-    }
-
-    protected boolean canCreatePK(DbEntity entity) {
-        return entity.getPrimaryKeys().size() > 0;
-    }
-
-    /**
-     * 
-     */
-    @Override
-    public void dropAutoPk(DataNode node, List dbEntities) throws Exception {
-        // there is no simple way to do that... probably requires
-        // editing metadata tables...
-        // Good thing is that it doesn't matter, since PK support
-        // is attached to the table itself, so if a table is dropped,
-        // it will be dropped as well
-    }
-
-    /**
-     * Returns an empty list, since OpenBase doesn't support this operation.
-     */
-    @Override
-    public List dropAutoPkStatements(List dbEntities) {
-        return Collections.EMPTY_LIST;
-    }
-
-    /**
-     * Returns a String to create PK support for an entity.
-     */
-    protected String createPKString(DbEntity entity) {
-        Collection<DbAttribute> pk = entity.getPrimaryKeys();
-
-        if (pk == null || pk.size() == 0) {
-            throw new CayenneRuntimeException("Entity '"
-                    + entity.getName()
-                    + "' has no PK defined.");
-        }
-
-        StringBuilder buffer = new StringBuilder();
-        buffer.append("CREATE PRIMARY KEY ");
-      
-        QuotingStrategy context = getAdapter().getQuotingStrategy();
-
-        buffer.append(context.quotedIdentifier(entity, entity.getName()));
-
-        buffer.append(" (");
-
-        Iterator<DbAttribute> it = pk.iterator();
-
-        // at this point we know that there is at least on PK column
-        DbAttribute firstColumn = it.next();
-        buffer.append(context.quotedName(firstColumn));
-
-        while (it.hasNext()) {
-            DbAttribute column = it.next();
-            buffer.append(", ");
-            buffer.append(context.quotedName(column));
-        }
-
-        buffer.append(")");
-        return buffer.toString();
-    }
-
-    /**
-     * Returns a String to create a unique index on table primary key columns per OpenBase
-     * recommendations.
-     */
-    protected String createUniquePKIndexString(DbEntity entity) {
-        Collection<DbAttribute> pk = entity.getPrimaryKeys();
-
-        QuotingStrategy context = getAdapter().getQuotingStrategy();
-        if (pk == null || pk.size() == 0) {
-            throw new CayenneRuntimeException("Entity '"
-                    + entity.getName()
-                    + "' has no PK defined.");
-        }
-
-        StringBuilder buffer = new StringBuilder();
-
-        // compound PK doesn't work well with UNIQUE index...
-        // create a regular one in this case
-        buffer.append(pk.size() == 1 ? "CREATE UNIQUE INDEX " : "CREATE INDEX ");
-
-        buffer.append(context.quotedIdentifier(entity, entity.getName()));
-        buffer.append(" (");
-
-        Iterator<DbAttribute> it = pk.iterator();
-
-        // at this point we know that there is at least on PK column
-        DbAttribute firstColumn = it.next();
-        buffer.append(context.quotedName(firstColumn));
-
-        while (it.hasNext()) {
-            DbAttribute column = it.next();
-            buffer.append(", ");
-            buffer.append(context.quotedName(column));
-        }
-        buffer.append(")");
-        return buffer.toString();
-    }
-
-    @Override
-    public void reset() {
-        // noop
-    }
-
-    /**
-     * Returns zero, since PK caching is not feasible with OpenBase PK generation
-     * mechanism.
      */
-    @Override
-    public int getPkCacheSize() {
-        return 0;
-    }
-
-    @Override
-    public void setPkCacheSize(int pkCacheSize) {
-        // noop, no PK caching
-    }
+	@Override
+	public void dropAutoPk(DataNode node, List dbEntities) throws Exception {
+		// there is no simple way to do that... probably requires
+		// editing metadata tables...
+		// Good thing is that it doesn't matter, since PK support
+		// is attached to the table itself, so if a table is dropped,
+		// it will be dropped as well
+	}
+
+	/**
+	 * Returns an empty list, since OpenBase doesn't support this operation.
+	 */
+	@Override
+	public List dropAutoPkStatements(List dbEntities) {
+		return Collections.EMPTY_LIST;
+	}
+
+	/**
+	 * Returns a String to create PK support for an entity.
+	 */
+	protected String createPKString(DbEntity entity) {
+		Collection<DbAttribute> pk = entity.getPrimaryKeys();
+
+		if (pk == null || pk.size() == 0) {
+			throw new CayenneRuntimeException("Entity '" + entity.getName() + "' has no PK defined.");
+		}
+
+		StringBuilder buffer = new StringBuilder();
+		buffer.append("CREATE PRIMARY KEY ");
+
+		QuotingStrategy context = getAdapter().getQuotingStrategy();
+
+		buffer.append(context.quotedIdentifier(entity, entity.getName()));
+
+		buffer.append(" (");
+
+		Iterator<DbAttribute> it = pk.iterator();
+
+		// at this point we know that there is at least on PK column
+		DbAttribute firstColumn = it.next();
+		buffer.append(context.quotedName(firstColumn));
+
+		while (it.hasNext()) {
+			DbAttribute column = it.next();
+			buffer.append(", ");
+			buffer.append(context.quotedName(column));
+		}
+
+		buffer.append(")");
+		return buffer.toString();
+	}
+
+	/**
+	 * Returns a String to create a unique index on table primary key columns
+	 * per OpenBase recommendations.
+	 */
+	protected String createUniquePKIndexString(DbEntity entity) {
+		Collection<DbAttribute> pk = entity.getPrimaryKeys();
+
+		QuotingStrategy context = getAdapter().getQuotingStrategy();
+		if (pk == null || pk.size() == 0) {
+			throw new CayenneRuntimeException("Entity '" + entity.getName() + "' has no PK defined.");
+		}
+
+		StringBuilder buffer = new StringBuilder();
+
+		// compound PK doesn't work well with UNIQUE index...
+		// create a regular one in this case
+		buffer.append(pk.size() == 1 ? "CREATE UNIQUE INDEX " : "CREATE INDEX ");
+
+		buffer.append(context.quotedIdentifier(entity, entity.getName()));
+		buffer.append(" (");
+
+		Iterator<DbAttribute> it = pk.iterator();
+
+		// at this point we know that there is at least on PK column
+		DbAttribute firstColumn = it.next();
+		buffer.append(context.quotedName(firstColumn));
+
+		while (it.hasNext()) {
+			DbAttribute column = it.next();
+			buffer.append(", ");
+			buffer.append(context.quotedName(column));
+		}
+		buffer.append(")");
+		return buffer.toString();
+	}
+
+	@Override
+	public void reset() {
+		// noop
+	}
+
+	/**
+	 * Returns zero, since PK caching is not feasible with OpenBase PK
+	 * generation mechanism.
+	 */
+	@Override
+	public int getPkCacheSize() {
+		return 0;
+	}
+
+	@Override
+	public void setPkCacheSize(int pkCacheSize) {
+		// noop, no PK caching
+	}
 
 }
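
The DDL builders above join the primary-key columns with a StringBuilder and fall back from a UNIQUE index to a plain index for compound keys. A simplified, standalone version of createUniquePKIndexString() without Cayenne's DbEntity and QuotingStrategy types (table and column names are illustrative):

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class PkIndexStringExample {

    // Builds the index DDL from a table name and its PK columns, mirroring
    // createUniquePKIndexString() above; identifiers are plain strings here.
    static String createUniquePkIndexString(String table, List<String> pkColumns) {
        if (pkColumns == null || pkColumns.isEmpty()) {
            throw new IllegalArgumentException("Table '" + table + "' has no PK defined.");
        }

        StringBuilder buffer = new StringBuilder();

        // a compound PK doesn't work well with a UNIQUE index, so fall back to
        // a regular index in that case (same rule as the generator above)
        buffer.append(pkColumns.size() == 1 ? "CREATE UNIQUE INDEX " : "CREATE INDEX ");
        buffer.append(table).append(" (");

        Iterator<String> it = pkColumns.iterator();
        buffer.append(it.next());
        while (it.hasNext()) {
            buffer.append(", ").append(it.next());
        }

        return buffer.append(")").toString();
    }

    public static void main(String[] args) {
        // illustrative table and column names
        System.out.println(createUniquePkIndexString("ARTIST", Arrays.asList("ARTIST_ID")));
        System.out.println(createUniquePkIndexString("ARTIST_EXHIBIT",
                Arrays.asList("ARTIST_ID", "EXHIBIT_ID")));
    }
}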

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/Oracle8LOBBatchAction.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/Oracle8LOBBatchAction.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/Oracle8LOBBatchAction.java
index 59766cb..8c6a7fb 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/Oracle8LOBBatchAction.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/Oracle8LOBBatchAction.java
@@ -51,223 +51,201 @@ import org.apache.cayenne.util.Util;
  */
 class Oracle8LOBBatchAction implements SQLAction {
 
-    private BatchQuery query;
-    private DbAdapter adapter;
-    private JdbcEventLogger logger;
-
-    private static void bind(DbAdapter adapter, PreparedStatement statement, ParameterBinding[] bindings)
-            throws SQLException, Exception {
+	private BatchQuery query;
+	private DbAdapter adapter;
+	private JdbcEventLogger logger;
 
-        for (ParameterBinding b : bindings) {
-            adapter.bindParameter(statement, b.getValue(), b.getStatementPosition(), b.getAttribute().getType(), b
-                    .getAttribute().getScale());
-        }
-    }
+	private static void bind(DbAdapter adapter, PreparedStatement statement, ParameterBinding[] bindings)
+			throws SQLException, Exception {
 
-    Oracle8LOBBatchAction(BatchQuery query, DbAdapter adapter, JdbcEventLogger logger) {
-        this.adapter = adapter;
-        this.query = query;
-        this.logger = logger;
-    }
+		for (ParameterBinding b : bindings) {
+			adapter.bindParameter(statement, b.getValue(), b.getStatementPosition(), b.getAttribute().getType(), b
+					.getAttribute().getScale());
+		}
+	}
 
-    @Override
-    public void performAction(Connection connection, OperationObserver observer) throws SQLException, Exception {
+	Oracle8LOBBatchAction(BatchQuery query, DbAdapter adapter, JdbcEventLogger logger) {
+		this.adapter = adapter;
+		this.query = query;
+		this.logger = logger;
+	}
 
-        Oracle8LOBBatchTranslator translator;
-        if (query instanceof InsertBatchQuery) {
-            translator = new Oracle8LOBInsertBatchTranslator((InsertBatchQuery) query, adapter, OracleAdapter.TRIM_FUNCTION);
-        } else if (query instanceof UpdateBatchQuery) {
-            translator = new Oracle8LOBUpdateBatchTranslator((UpdateBatchQuery) query, adapter, OracleAdapter.TRIM_FUNCTION);
-        } else {
-            throw new CayenneException("Unsupported batch type for special LOB processing: " + query);
-        }
+	@Override
+	public void performAction(Connection connection, OperationObserver observer) throws SQLException, Exception {
 
-        translator.setNewBlobFunction(OracleAdapter.NEW_BLOB_FUNCTION);
-        translator.setNewClobFunction(OracleAdapter.NEW_CLOB_FUNCTION);
-
-        // no batching is done, queries are translated
-        // for each batch set, since prepared statements
-        // may be different depending on whether LOBs are NULL or not..
-
-        Oracle8LOBBatchQueryWrapper selectQuery = new Oracle8LOBBatchQueryWrapper(query);
-        List<DbAttribute> qualifierAttributes = selectQuery.getDbAttributesForLOBSelectQualifier();
-
-        for (BatchQueryRow row : query.getRows()) {
-
-            selectQuery.indexLOBAttributes(row);
-
-            int updated = 0;
-            String updateStr = translator.createSql(row);
-
-            // 1. run row update
-            logger.logQuery(updateStr, Collections.EMPTY_LIST);
-            PreparedStatement statement = connection.prepareStatement(updateStr);
-            try {
-
-                ParameterBinding[] bindings = translator.updateBindings(row);
-                logger.logQueryParameters("bind", bindings);
-
-                bind(adapter, statement, bindings);
-
-                updated = statement.executeUpdate();
-                logger.logUpdateCount(updated);
-            } finally {
-                try {
-                    statement.close();
-                } catch (Exception e) {
-                }
-            }
-
-            // 2. run row LOB update (SELECT...FOR UPDATE and writing out LOBs)
-            processLOBRow(connection, translator, selectQuery, qualifierAttributes, row);
-
-            // finally, notify delegate that the row was updated
-            observer.nextCount(query, updated);
-        }
-    }
-
-    void processLOBRow(Connection con, Oracle8LOBBatchTranslator queryBuilder, Oracle8LOBBatchQueryWrapper selectQuery,
-            List<DbAttribute> qualifierAttributes, BatchQueryRow row) throws SQLException, Exception {
-
-        List<DbAttribute> lobAttributes = selectQuery.getDbAttributesForUpdatedLOBColumns();
-        if (lobAttributes.size() == 0) {
-            return;
-        }
-
-        boolean isLoggable = logger.isLoggable();
-
-        List<Object> qualifierValues = selectQuery.getValuesForLOBSelectQualifier(row);
-        List<Object> lobValues = selectQuery.getValuesForUpdatedLOBColumns();
-        int parametersSize = qualifierValues.size();
-        int lobSize = lobAttributes.size();
-
-        String selectStr = queryBuilder.createLOBSelectString(lobAttributes, qualifierAttributes);
-
-        if (isLoggable) {
-            logger.logQuery(selectStr, qualifierValues);
-            logger.logQueryParameters("write LOB", null, lobValues, false);
-        }
-
-        PreparedStatement selectStatement = con.prepareStatement(selectStr);
-        try {
-            for (int i = 0; i < parametersSize; i++) {
-                Object value = qualifierValues.get(i);
-                DbAttribute attribute = qualifierAttributes.get(i);
-
-                adapter.bindParameter(selectStatement, value, i + 1, attribute.getType(), attribute.getScale());
-            }
-
-            ResultSet result = selectStatement.executeQuery();
-
-            try {
-                if (!result.next()) {
-                    throw new CayenneRuntimeException("Missing LOB row.");
-                }
-
-                // read the only expected row
-
-                for (int i = 0; i < lobSize; i++) {
-                    DbAttribute attribute = lobAttributes.get(i);
-                    int type = attribute.getType();
-
-                    if (type == Types.CLOB) {
-                        Clob clob = result.getClob(i + 1);
-                        Object clobVal = lobValues.get(i);
-
-                        if (clobVal instanceof char[]) {
-                            writeClob(clob, (char[]) clobVal);
-                        } else {
-                            writeClob(clob, clobVal.toString());
-                        }
-                    } else if (type == Types.BLOB) {
-                        Blob blob = result.getBlob(i + 1);
-
-                        Object blobVal = lobValues.get(i);
-                        if (blobVal instanceof byte[]) {
-                            writeBlob(blob, (byte[]) blobVal);
-                        } else {
-                            String className = (blobVal != null) ? blobVal.getClass().getName() : null;
-                            throw new CayenneRuntimeException("Unsupported class of BLOB value: " + className);
-                        }
-                    } else {
-                        throw new CayenneRuntimeException("Only BLOB or CLOB is expected here, got: " + type);
-                    }
-                }
-
-                if (result.next()) {
-                    throw new CayenneRuntimeException("More than one LOB row found.");
-                }
-            } finally {
-                try {
-                    result.close();
-                } catch (Exception e) {
-                }
-            }
-        } finally {
-            try {
-                selectStatement.close();
-            } catch (Exception e) {
-            }
-        }
-    }
-
-    /**
-     * Override the Oracle writeBlob() method to be compatible with Oracle8
-     * drivers.
-     */
-    protected void writeBlob(Blob blob, byte[] value) {
-        // Fix for CAY-1307. For Oracle8, get the method found by reflection in
-        // OracleAdapter. (Code taken from Cayenne 2.)
-        Method getBinaryStreamMethod = Oracle8Adapter.getOutputStreamFromBlobMethod();
-        try {
-            OutputStream out = (OutputStream) getBinaryStreamMethod.invoke(blob, (Object[]) null);
-            try {
-                out.write(value);
-                out.flush();
-            } finally {
-                out.close();
-            }
-        } catch (Exception e) {
-            throw new CayenneRuntimeException("Error processing BLOB.", Util.unwindException(e));
-        }
-    }
-
-    /**
-     * Override the Oracle writeClob() method to be compatible with Oracle8
-     * drivers.
-     */
-    protected void writeClob(Clob clob, char[] value) {
-        Method getWriterMethod = Oracle8Adapter.getWriterFromClobMethod();
-        try {
-            Writer out = (Writer) getWriterMethod.invoke(clob, (Object[]) null);
-            try {
-                out.write(value);
-                out.flush();
-            } finally {
-                out.close();
-            }
-
-        } catch (Exception e) {
-            throw new CayenneRuntimeException("Error processing CLOB.", Util.unwindException(e));
-        }
-    }
-
-    /**
-     * Override the Oracle writeClob() method to be compatible with Oracle8
-     * drivers.
-     */
-    protected void writeClob(Clob clob, String value) {
-        Method getWriterMethod = Oracle8Adapter.getWriterFromClobMethod();
-        try {
-            Writer out = (Writer) getWriterMethod.invoke(clob, (Object[]) null);
-            try {
-                out.write(value);
-                out.flush();
-            } finally {
-                out.close();
-            }
-        } catch (Exception e) {
-            throw new CayenneRuntimeException("Error processing CLOB.", Util.unwindException(e));
-        }
-    }
+		Oracle8LOBBatchTranslator translator;
+		if (query instanceof InsertBatchQuery) {
+			translator = new Oracle8LOBInsertBatchTranslator((InsertBatchQuery) query, adapter,
+					OracleAdapter.TRIM_FUNCTION);
+		} else if (query instanceof UpdateBatchQuery) {
+			translator = new Oracle8LOBUpdateBatchTranslator((UpdateBatchQuery) query, adapter,
+					OracleAdapter.TRIM_FUNCTION);
+		} else {
+			throw new CayenneException("Unsupported batch type for special LOB processing: " + query);
+		}
+
+		translator.setNewBlobFunction(OracleAdapter.NEW_BLOB_FUNCTION);
+		translator.setNewClobFunction(OracleAdapter.NEW_CLOB_FUNCTION);
+
+		// no batching is done, queries are translated
+		// for each batch set, since prepared statements
+		// may be different depending on whether LOBs are NULL or not..
+
+		Oracle8LOBBatchQueryWrapper selectQuery = new Oracle8LOBBatchQueryWrapper(query);
+		List<DbAttribute> qualifierAttributes = selectQuery.getDbAttributesForLOBSelectQualifier();
+
+		for (BatchQueryRow row : query.getRows()) {
+
+			selectQuery.indexLOBAttributes(row);
+
+			int updated = 0;
+			String updateStr = translator.createSql(row);
+
+			// 1. run row update
+			logger.logQuery(updateStr, Collections.EMPTY_LIST);
+
+			try (PreparedStatement statement = connection.prepareStatement(updateStr);) {
+
+				ParameterBinding[] bindings = translator.updateBindings(row);
+				logger.logQueryParameters("bind", bindings);
+
+				bind(adapter, statement, bindings);
+
+				updated = statement.executeUpdate();
+				logger.logUpdateCount(updated);
+			}
+
+			// 2. run row LOB update (SELECT...FOR UPDATE and writing out LOBs)
+			processLOBRow(connection, translator, selectQuery, qualifierAttributes, row);
+
+			// finally, notify delegate that the row was updated
+			observer.nextCount(query, updated);
+		}
+	}
+
+	void processLOBRow(Connection con, Oracle8LOBBatchTranslator queryBuilder, Oracle8LOBBatchQueryWrapper selectQuery,
+			List<DbAttribute> qualifierAttributes, BatchQueryRow row) throws SQLException, Exception {
+
+		List<DbAttribute> lobAttributes = selectQuery.getDbAttributesForUpdatedLOBColumns();
+		if (lobAttributes.size() == 0) {
+			return;
+		}
+
+		boolean isLoggable = logger.isLoggable();
+
+		List<Object> qualifierValues = selectQuery.getValuesForLOBSelectQualifier(row);
+		List<Object> lobValues = selectQuery.getValuesForUpdatedLOBColumns();
+		int parametersSize = qualifierValues.size();
+		int lobSize = lobAttributes.size();
+
+		String selectStr = queryBuilder.createLOBSelectString(lobAttributes, qualifierAttributes);
+
+		if (isLoggable) {
+			logger.logQuery(selectStr, qualifierValues);
+			logger.logQueryParameters("write LOB", null, lobValues, false);
+		}
+
+		try (PreparedStatement selectStatement = con.prepareStatement(selectStr);) {
+			for (int i = 0; i < parametersSize; i++) {
+				Object value = qualifierValues.get(i);
+				DbAttribute attribute = qualifierAttributes.get(i);
+
+				adapter.bindParameter(selectStatement, value, i + 1, attribute.getType(), attribute.getScale());
+			}
+
+			try (ResultSet result = selectStatement.executeQuery();) {
+				if (!result.next()) {
+					throw new CayenneRuntimeException("Missing LOB row.");
+				}
+
+				// read the only expected row
+
+				for (int i = 0; i < lobSize; i++) {
+					DbAttribute attribute = lobAttributes.get(i);
+					int type = attribute.getType();
+
+					if (type == Types.CLOB) {
+						Clob clob = result.getClob(i + 1);
+						Object clobVal = lobValues.get(i);
+
+						if (clobVal instanceof char[]) {
+							writeClob(clob, (char[]) clobVal);
+						} else {
+							writeClob(clob, clobVal.toString());
+						}
+					} else if (type == Types.BLOB) {
+						Blob blob = result.getBlob(i + 1);
+
+						Object blobVal = lobValues.get(i);
+						if (blobVal instanceof byte[]) {
+							writeBlob(blob, (byte[]) blobVal);
+						} else {
+							String className = (blobVal != null) ? blobVal.getClass().getName() : null;
+							throw new CayenneRuntimeException("Unsupported class of BLOB value: " + className);
+						}
+					} else {
+						throw new CayenneRuntimeException("Only BLOB or CLOB is expected here, got: " + type);
+					}
+				}
+
+				if (result.next()) {
+					throw new CayenneRuntimeException("More than one LOB row found.");
+				}
+			}
+		}
+	}
+
+	/**
+	 * Override the Oracle writeBlob() method to be compatible with Oracle8
+	 * drivers.
+	 */
+	protected void writeBlob(Blob blob, byte[] value) {
+		// Fix for CAY-1307. For Oracle8, get the method found by reflection in
+		// OracleAdapter. (Code taken from Cayenne 2.)
+		Method getBinaryStreamMethod = Oracle8Adapter.getOutputStreamFromBlobMethod();
+		try {
+
+			try (OutputStream out = (OutputStream) getBinaryStreamMethod.invoke(blob, (Object[]) null);) {
+				out.write(value);
+				out.flush();
+			}
+		} catch (Exception e) {
+			throw new CayenneRuntimeException("Error processing BLOB.", Util.unwindException(e));
+		}
+	}
+
+	/**
+	 * Override the Oracle writeClob() method to be compatible with Oracle8
+	 * drivers.
+	 */
+	protected void writeClob(Clob clob, char[] value) {
+		Method getWriterMethod = Oracle8Adapter.getWriterFromClobMethod();
+		try {
+
+			try (Writer out = (Writer) getWriterMethod.invoke(clob, (Object[]) null);) {
+				out.write(value);
+				out.flush();
+			}
+
+		} catch (Exception e) {
+			throw new CayenneRuntimeException("Error processing CLOB.", Util.unwindException(e));
+		}
+	}
+
+	/**
+	 * Override the Oracle writeClob() method to be compatible with Oracle8
+	 * drivers.
+	 */
+	protected void writeClob(Clob clob, String value) {
+		Method getWriterMethod = Oracle8Adapter.getWriterFromClobMethod();
+		try {
+
+			try (Writer out = (Writer) getWriterMethod.invoke(clob, (Object[]) null);) {
+				out.write(value);
+				out.flush();
+			}
+		} catch (Exception e) {
+			throw new CayenneRuntimeException("Error processing CLOB.", Util.unwindException(e));
+		}
+	}
 }
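The new code above replaces nested try/finally blocks with Java 7 try-with-resources: Connection, PreparedStatement and ResultSet are declared in the try header and closed automatically in reverse order, and any failure thrown from close() during cleanup is attached to the primary exception as a suppressed exception. A minimal sketch of the same pattern outside Cayenne, assuming only a javax.sql.DataSource (class and method names here are illustrative, not Cayenne API):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import javax.sql.DataSource;

public class TryWithResourcesSketch {

	// Reads a single long value; all three JDBC resources are closed
	// automatically when the try block exits, normally or exceptionally.
	static long readSingleLong(DataSource dataSource, String sql) throws SQLException {
		try (Connection con = dataSource.getConnection();
				PreparedStatement st = con.prepareStatement(sql);
				ResultSet rs = st.executeQuery()) {

			if (!rs.next()) {
				throw new SQLException("No rows returned for: " + sql);
			}
			return rs.getLong(1);
		}
	}
}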

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/Oracle8LOBBatchQueryWrapper.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/Oracle8LOBBatchQueryWrapper.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/Oracle8LOBBatchQueryWrapper.java
index 5632e73..4cfee1b 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/Oracle8LOBBatchQueryWrapper.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/Oracle8LOBBatchQueryWrapper.java
@@ -39,168 +39,166 @@ import org.apache.cayenne.query.BatchQueryRow;
  */
 class Oracle8LOBBatchQueryWrapper {
 
-    protected BatchQuery query;
-
-    protected List<DbAttribute> dbAttributes;
-
-    // attribute list decoders
-    protected boolean[] qualifierAttributes;
-    protected boolean[] allLOBAttributes;
-    protected Object[] updatedLOBAttributes;
-
-    Oracle8LOBBatchQueryWrapper(BatchQuery query) {
-        this.query = query;
-        this.dbAttributes = query.getDbAttributes();
-
-        int len = dbAttributes.size();
-        this.qualifierAttributes = new boolean[len];
-        this.allLOBAttributes = new boolean[len];
-        this.updatedLOBAttributes = new Object[len];
-
-        indexQualifierAttributes();
-    }
-
-    /**
-     * Indexes attributes
-     */
-    protected void indexQualifierAttributes() {
-        int len = this.dbAttributes.size();
-        for (int i = 0; i < len; i++) {
-            DbAttribute attribute = this.dbAttributes.get(i);
-            int type = attribute.getType();
-            qualifierAttributes[i] = attribute.isPrimaryKey();
-            allLOBAttributes[i] = type == Types.BLOB || type == Types.CLOB;
-        }
-    }
-
-    /**
-     * Indexes attributes
-     */
-    void indexLOBAttributes(BatchQueryRow row) {
-        int len = updatedLOBAttributes.length;
-        for (int i = 0; i < len; i++) {
-            updatedLOBAttributes[i] = null;
-
-            if (allLOBAttributes[i]) {
-                // skip null and empty LOBs
-                Object value = row.getValue(i);
-
-                if (value == null) {
-                    continue;
-                }
-
-                if (dbAttributes.get(i).getType() == Types.BLOB) {
-                    updatedLOBAttributes[i] = convertToBlobValue(value);
-                } else {
-                    updatedLOBAttributes[i] = convertToClobValue(value);
-                }
-            }
-        }
-    }
-
-    /**
-     * Converts value to byte[] if possible.
-     */
-    protected byte[] convertToBlobValue(Object value) {
-        if (value instanceof byte[]) {
-            byte[] bytes = (byte[]) value;
-            return bytes.length == 0 ? null : bytes;
-        } else if (value instanceof Serializable) {
-            ByteArrayOutputStream bytes = new ByteArrayOutputStream() {
-
-                @Override
-                public synchronized byte[] toByteArray() {
-                    return buf;
-                }
-            };
-
-            try {
-                ObjectOutputStream out = new ObjectOutputStream(bytes);
-                out.writeObject(value);
-                out.close();
-            } catch (IOException e) {
-                throw new CayenneRuntimeException("Error serializing object", e);
-            }
-
-            return bytes.toByteArray();
-        }
-
-        return null;
-    }
-
-    /**
-     * Converts to char[] or String. Both are acceptable when writing CLOBs.
-     */
-    protected Object convertToClobValue(Object value) {
-
-        if (value instanceof char[]) {
-            char[] chars = (char[]) value;
-            return chars.length == 0 ? null : chars;
-        } else {
-            String strValue = value.toString();
-            return strValue.length() == 0 ? null : strValue;
-        }
-    }
-
-    /**
-     * Returns a list of DbAttributes used in the qualifier of the query that
-     * selects a LOB row for LOB update.
-     */
-    List<DbAttribute> getDbAttributesForLOBSelectQualifier() {
-
-        int len = qualifierAttributes.length;
-        List<DbAttribute> attributes = new ArrayList<DbAttribute>(len);
-
-        for (int i = 0; i < len; i++) {
-            if (this.qualifierAttributes[i]) {
-                attributes.add(this.dbAttributes.get(i));
-            }
-        }
-        return attributes;
-    }
-
-    /**
-     * Returns a list of DbAttributes that correspond to the LOB columns updated
-     * in the current row in the batch query. The list will not include LOB
-     * attributes that are null or empty.
-     */
-    List<DbAttribute> getDbAttributesForUpdatedLOBColumns() {
-
-        int len = updatedLOBAttributes.length;
-        List<DbAttribute> attributes = new ArrayList<DbAttribute>(len);
-
-        for (int i = 0; i < len; i++) {
-            if (this.updatedLOBAttributes[i] != null) {
-                attributes.add(this.dbAttributes.get(i));
-            }
-        }
-        return attributes;
-    }
-
-    List<Object> getValuesForLOBSelectQualifier(BatchQueryRow row) {
-
-        int len = this.qualifierAttributes.length;
-        List<Object> values = new ArrayList<Object>(len);
-        for (int i = 0; i < len; i++) {
-            if (this.qualifierAttributes[i]) {
-                values.add(row.getValue(i));
-            }
-        }
-
-        return values;
-    }
-
-    List<Object> getValuesForUpdatedLOBColumns() {
-
-        int len = this.updatedLOBAttributes.length;
-        List<Object> values = new ArrayList<Object>(len);
-        for (int i = 0; i < len; i++) {
-            if (this.updatedLOBAttributes[i] != null) {
-                values.add(this.updatedLOBAttributes[i]);
-            }
-        }
-
-        return values;
-    }
+	protected BatchQuery query;
+
+	protected List<DbAttribute> dbAttributes;
+
+	// attribute list decoders
+	protected boolean[] qualifierAttributes;
+	protected boolean[] allLOBAttributes;
+	protected Object[] updatedLOBAttributes;
+
+	Oracle8LOBBatchQueryWrapper(BatchQuery query) {
+		this.query = query;
+		this.dbAttributes = query.getDbAttributes();
+
+		int len = dbAttributes.size();
+		this.qualifierAttributes = new boolean[len];
+		this.allLOBAttributes = new boolean[len];
+		this.updatedLOBAttributes = new Object[len];
+
+		indexQualifierAttributes();
+	}
+
+	/**
+	 * Indexes attributes
+	 */
+	protected void indexQualifierAttributes() {
+		int len = this.dbAttributes.size();
+		for (int i = 0; i < len; i++) {
+			DbAttribute attribute = this.dbAttributes.get(i);
+			int type = attribute.getType();
+			qualifierAttributes[i] = attribute.isPrimaryKey();
+			allLOBAttributes[i] = type == Types.BLOB || type == Types.CLOB;
+		}
+	}
+
+	/**
+	 * Indexes attributes
+	 */
+	void indexLOBAttributes(BatchQueryRow row) {
+		int len = updatedLOBAttributes.length;
+		for (int i = 0; i < len; i++) {
+			updatedLOBAttributes[i] = null;
+
+			if (allLOBAttributes[i]) {
+				// skip null and empty LOBs
+				Object value = row.getValue(i);
+
+				if (value == null) {
+					continue;
+				}
+
+				if (dbAttributes.get(i).getType() == Types.BLOB) {
+					updatedLOBAttributes[i] = convertToBlobValue(value);
+				} else {
+					updatedLOBAttributes[i] = convertToClobValue(value);
+				}
+			}
+		}
+	}
+
+	/**
+	 * Converts value to byte[] if possible.
+	 */
+	protected byte[] convertToBlobValue(Object value) {
+		if (value instanceof byte[]) {
+			byte[] bytes = (byte[]) value;
+			return bytes.length == 0 ? null : bytes;
+		} else if (value instanceof Serializable) {
+			ByteArrayOutputStream bytes = new ByteArrayOutputStream() {
+
+				@Override
+				public synchronized byte[] toByteArray() {
+					return buf;
+				}
+			};
+
+			try (ObjectOutputStream out = new ObjectOutputStream(bytes);) {
+				out.writeObject(value);
+			} catch (IOException e) {
+				throw new CayenneRuntimeException("Error serializing object", e);
+			}
+
+			return bytes.toByteArray();
+		}
+
+		return null;
+	}
+
+	/**
+	 * Converts to char[] or String. Both are acceptable when writing CLOBs.
+	 */
+	protected Object convertToClobValue(Object value) {
+
+		if (value instanceof char[]) {
+			char[] chars = (char[]) value;
+			return chars.length == 0 ? null : chars;
+		} else {
+			String strValue = value.toString();
+			return strValue.length() == 0 ? null : strValue;
+		}
+	}
+
+	/**
+	 * Returns a list of DbAttributes used in the qualifier of the query that
+	 * selects a LOB row for LOB update.
+	 */
+	List<DbAttribute> getDbAttributesForLOBSelectQualifier() {
+
+		int len = qualifierAttributes.length;
+		List<DbAttribute> attributes = new ArrayList<DbAttribute>(len);
+
+		for (int i = 0; i < len; i++) {
+			if (this.qualifierAttributes[i]) {
+				attributes.add(this.dbAttributes.get(i));
+			}
+		}
+		return attributes;
+	}
+
+	/**
+	 * Returns a list of DbAttributes that correspond to the LOB columns updated
+	 * in the current row in the batch query. The list will not include LOB
+	 * attributes that are null or empty.
+	 */
+	List<DbAttribute> getDbAttributesForUpdatedLOBColumns() {
+
+		int len = updatedLOBAttributes.length;
+		List<DbAttribute> attributes = new ArrayList<DbAttribute>(len);
+
+		for (int i = 0; i < len; i++) {
+			if (this.updatedLOBAttributes[i] != null) {
+				attributes.add(this.dbAttributes.get(i));
+			}
+		}
+		return attributes;
+	}
+
+	List<Object> getValuesForLOBSelectQualifier(BatchQueryRow row) {
+
+		int len = this.qualifierAttributes.length;
+		List<Object> values = new ArrayList<Object>(len);
+		for (int i = 0; i < len; i++) {
+			if (this.qualifierAttributes[i]) {
+				values.add(row.getValue(i));
+			}
+		}
+
+		return values;
+	}
+
+	List<Object> getValuesForUpdatedLOBColumns() {
+
+		int len = this.updatedLOBAttributes.length;
+		List<Object> values = new ArrayList<Object>(len);
+		for (int i = 0; i < len; i++) {
+			if (this.updatedLOBAttributes[i] != null) {
+				values.add(this.updatedLOBAttributes[i]);
+			}
+		}
+
+		return values;
+	}
 
 }
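convertToBlobValue() above turns any Serializable value into a byte[] before it is written to a BLOB column, using an anonymous ByteArrayOutputStream subclass whose toByteArray() returns the internal buffer without copying it. A plainer sketch of the same conversion with the standard, copying toByteArray() and try-with-resources (names below are illustrative, not Cayenne API):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;

public class BlobValueSketch {

	// Serializes a value into bytes suitable for a BLOB column. Closing the
	// ObjectOutputStream (via try-with-resources) flushes it before
	// toByteArray() is called, so the returned array is complete and trimmed
	// to the exact serialized length.
	static byte[] toBlobValue(Serializable value) {
		ByteArrayOutputStream bytes = new ByteArrayOutputStream();
		try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
			out.writeObject(value);
		} catch (IOException e) {
			throw new RuntimeException("Error serializing object", e);
		}
		return bytes.toByteArray();
	}
}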

http://git-wip-us.apache.org/repos/asf/cayenne/blob/26d8434d/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OraclePkGenerator.java
----------------------------------------------------------------------
diff --git a/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OraclePkGenerator.java b/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OraclePkGenerator.java
index 6f775db..4c0f630 100644
--- a/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OraclePkGenerator.java
+++ b/cayenne-server/src/main/java/org/apache/cayenne/dba/oracle/OraclePkGenerator.java
@@ -54,201 +54,186 @@ import org.apache.cayenne.map.DbKeyGenerator;
  */
 public class OraclePkGenerator extends JdbcPkGenerator {
 
-    protected OraclePkGenerator(JdbcAdapter adapter) {
-        super(adapter);
-    }
-
-    private static final String _SEQUENCE_PREFIX = "pk_";
-
-    @Override
-    public void createAutoPk(DataNode node, List dbEntities) throws Exception {
-        List sequences = getExistingSequences(node);
-
-        // create needed sequences
-        Iterator it = dbEntities.iterator();
-        while (it.hasNext()) {
-            DbEntity ent = (DbEntity) it.next();
-            if (!sequences.contains(sequenceName(ent))) {
-                runUpdate(node, createSequenceString(ent));
-            }
-        }
-    }
-
-    @Override
-    public List createAutoPkStatements(List dbEntities) {
-        List<String> list = new ArrayList<String>();
-        Iterator it = dbEntities.iterator();
-        while (it.hasNext()) {
-            DbEntity ent = (DbEntity) it.next();
-            list.add(createSequenceString(ent));
-        }
-
-        return list;
-    }
-
-    @Override
-    public void dropAutoPk(DataNode node, List dbEntities) throws Exception {
-        List sequences = getExistingSequences(node);
-
-        // drop obsolete sequences
-        Iterator it = dbEntities.iterator();
-        while (it.hasNext()) {
-            DbEntity ent = (DbEntity) it.next();
-            String name;
-            if (ent.getDataMap().isQuotingSQLIdentifiers()) {
-                DbEntity tempEnt = new DbEntity();
-                DataMap dm = new DataMap();
-                dm.setQuotingSQLIdentifiers(false);
-                tempEnt.setDataMap(dm);
-                tempEnt.setName(ent.getName());
-                name = stripSchemaName(sequenceName(tempEnt));
-            } else {
-                name = stripSchemaName(sequenceName(ent));
-            }
-            if (sequences.contains(name)) {
-                runUpdate(node, dropSequenceString(ent));
-            }
-        }
-    }
-
-    @Override
-    public List dropAutoPkStatements(List dbEntities) {
-        List<String> list = new ArrayList<String>();
-        Iterator it = dbEntities.iterator();
-        while (it.hasNext()) {
-            DbEntity ent = (DbEntity) it.next();
-            list.add(dropSequenceString(ent));
-        }
-
-        return list;
-    }
-
-    protected String createSequenceString(DbEntity ent) {
-        return "CREATE SEQUENCE " + sequenceName(ent) + " START WITH " + pkStartValue
-                + " INCREMENT BY " + pkCacheSize(ent);
-    }
-
-    /**
-     * Returns a SQL string needed to drop any database objects associated with
-     * automatic primary key generation process for a specific DbEntity.
-     */
-    protected String dropSequenceString(DbEntity ent) {
-
-        return "DROP SEQUENCE " + sequenceName(ent);
-    }
-
-    /**
-     * Generates primary key by calling Oracle sequence corresponding to the
-     * <code>dbEntity</code>. Executed SQL looks like this:
-     * 
-     * <pre>
-     *   SELECT pk_table_name.nextval FROM DUAL
-     * </pre>
-     * 
-     * @since 3.0
-     */
-    @Override
-    protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception {
-
-        DbKeyGenerator pkGenerator = entity.getPrimaryKeyGenerator();
-        String pkGeneratingSequenceName;
-        if (pkGenerator != null && DbKeyGenerator.ORACLE_TYPE.equals(pkGenerator.getGeneratorType())
-                && pkGenerator.getGeneratorName() != null) {
-            pkGeneratingSequenceName = pkGenerator.getGeneratorName();
-        } else {
-            pkGeneratingSequenceName = sequenceName(entity);
-        }
-
-        Connection con = node.getDataSource().getConnection();
-        try {
-            Statement st = con.createStatement();
-            try {
-                String sql = "SELECT " + pkGeneratingSequenceName + ".nextval FROM DUAL";
-                adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
-                ResultSet rs = st.executeQuery(sql);
-                try {
-                    // Object pk = null;
-                    if (!rs.next()) {
-                        throw new CayenneRuntimeException("Error generating pk for DbEntity " + entity.getName());
-                    }
-                    return rs.getLong(1);
-                } finally {
-                    rs.close();
-                }
-            } finally {
-                st.close();
-            }
-        } finally {
-            con.close();
-        }
-
-    }
-
-    protected int pkCacheSize(DbEntity entity) {
-        // use custom generator if possible
-        DbKeyGenerator keyGenerator = entity.getPrimaryKeyGenerator();
-        if (keyGenerator != null && DbKeyGenerator.ORACLE_TYPE.equals(keyGenerator.getGeneratorType())
-                && keyGenerator.getGeneratorName() != null) {
-
-            Integer size = keyGenerator.getKeyCacheSize();
-            return (size != null && size.intValue() >= 1) ? size.intValue() : super.getPkCacheSize();
-        } else {
-            return super.getPkCacheSize();
-        }
-    }
-
-    /** Returns expected primary key sequence name for a DbEntity. */
-    protected String sequenceName(DbEntity entity) {
-
-        // use custom generator if possible
-        DbKeyGenerator keyGenerator = entity.getPrimaryKeyGenerator();
-        if (keyGenerator != null && DbKeyGenerator.ORACLE_TYPE.equals(keyGenerator.getGeneratorType())
-                && keyGenerator.getGeneratorName() != null) {
-
-            return keyGenerator.getGeneratorName().toLowerCase();
-        } else {
-            String entName = entity.getName();
-            String seqName = _SEQUENCE_PREFIX + entName.toLowerCase();
-
-            return adapter.getQuotingStrategy().quotedIdentifier(entity, entity.getCatalog(), entity.getSchema(),
-                    seqName);
-        }
-    }
-
-    protected String stripSchemaName(String sequenceName) {
-        int ind = sequenceName.indexOf('.');
-        return ind >= 0 ? sequenceName.substring(ind + 1) : sequenceName;
-    }
-
-    /**
-     * Fetches a list of existing sequences that might match Cayenne generated
-     * ones.
-     */
-    protected List getExistingSequences(DataNode node) throws SQLException {
-
-        // check existing sequences
-        Connection con = node.getDataSource().getConnection();
-
-        try {
-            Statement sel = con.createStatement();
-            try {
-                String sql = "SELECT LOWER(SEQUENCE_NAME) FROM ALL_SEQUENCES";
-                adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
-                ResultSet rs = sel.executeQuery(sql);
-                try {
-                    List<String> sequenceList = new ArrayList<String>();
-                    while (rs.next()) {
-                        sequenceList.add(rs.getString(1));
-                    }
-                    return sequenceList;
-                } finally {
-                    rs.close();
-                }
-            } finally {
-                sel.close();
-            }
-        } finally {
-            con.close();
-        }
-    }
+	protected OraclePkGenerator(JdbcAdapter adapter) {
+		super(adapter);
+	}
+
+	private static final String _SEQUENCE_PREFIX = "pk_";
+
+	@Override
+	public void createAutoPk(DataNode node, List dbEntities) throws Exception {
+		List sequences = getExistingSequences(node);
+
+		// create needed sequences
+		Iterator it = dbEntities.iterator();
+		while (it.hasNext()) {
+			DbEntity ent = (DbEntity) it.next();
+			if (!sequences.contains(sequenceName(ent))) {
+				runUpdate(node, createSequenceString(ent));
+			}
+		}
+	}
+
+	@Override
+	public List createAutoPkStatements(List dbEntities) {
+		List<String> list = new ArrayList<String>();
+		Iterator it = dbEntities.iterator();
+		while (it.hasNext()) {
+			DbEntity ent = (DbEntity) it.next();
+			list.add(createSequenceString(ent));
+		}
+
+		return list;
+	}
+
+	@Override
+	public void dropAutoPk(DataNode node, List dbEntities) throws Exception {
+		List sequences = getExistingSequences(node);
+
+		// drop obsolete sequences
+		Iterator it = dbEntities.iterator();
+		while (it.hasNext()) {
+			DbEntity ent = (DbEntity) it.next();
+			String name;
+			if (ent.getDataMap().isQuotingSQLIdentifiers()) {
+				DbEntity tempEnt = new DbEntity();
+				DataMap dm = new DataMap();
+				dm.setQuotingSQLIdentifiers(false);
+				tempEnt.setDataMap(dm);
+				tempEnt.setName(ent.getName());
+				name = stripSchemaName(sequenceName(tempEnt));
+			} else {
+				name = stripSchemaName(sequenceName(ent));
+			}
+			if (sequences.contains(name)) {
+				runUpdate(node, dropSequenceString(ent));
+			}
+		}
+	}
+
+	@Override
+	public List dropAutoPkStatements(List dbEntities) {
+		List<String> list = new ArrayList<String>();
+		Iterator it = dbEntities.iterator();
+		while (it.hasNext()) {
+			DbEntity ent = (DbEntity) it.next();
+			list.add(dropSequenceString(ent));
+		}
+
+		return list;
+	}
+
+	protected String createSequenceString(DbEntity ent) {
+		return "CREATE SEQUENCE " + sequenceName(ent) + " START WITH " + pkStartValue + " INCREMENT BY "
+				+ pkCacheSize(ent);
+	}
+
+	/**
+	 * Returns a SQL string needed to drop any database objects associated with
+	 * automatic primary key generation process for a specific DbEntity.
+	 */
+	protected String dropSequenceString(DbEntity ent) {
+
+		return "DROP SEQUENCE " + sequenceName(ent);
+	}
+
+	/**
+	 * Generates primary key by calling Oracle sequence corresponding to the
+	 * <code>dbEntity</code>. Executed SQL looks like this:
+	 * 
+	 * <pre>
+	 *   SELECT pk_table_name.nextval FROM DUAL
+	 * </pre>
+	 * 
+	 * @since 3.0
+	 */
+	@Override
+	protected long longPkFromDatabase(DataNode node, DbEntity entity) throws Exception {
+
+		DbKeyGenerator pkGenerator = entity.getPrimaryKeyGenerator();
+		String pkGeneratingSequenceName;
+		if (pkGenerator != null && DbKeyGenerator.ORACLE_TYPE.equals(pkGenerator.getGeneratorType())
+				&& pkGenerator.getGeneratorName() != null) {
+			pkGeneratingSequenceName = pkGenerator.getGeneratorName();
+		} else {
+			pkGeneratingSequenceName = sequenceName(entity);
+		}
+
+		try (Connection con = node.getDataSource().getConnection();) {
+
+			try (Statement st = con.createStatement();) {
+				String sql = "SELECT " + pkGeneratingSequenceName + ".nextval FROM DUAL";
+				adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
+
+				try (ResultSet rs = st.executeQuery(sql);) {
+					// Object pk = null;
+					if (!rs.next()) {
+						throw new CayenneRuntimeException("Error generating pk for DbEntity " + entity.getName());
+					}
+					return rs.getLong(1);
+				}
+			}
+		}
+	}
+
+	protected int pkCacheSize(DbEntity entity) {
+		// use custom generator if possible
+		DbKeyGenerator keyGenerator = entity.getPrimaryKeyGenerator();
+		if (keyGenerator != null && DbKeyGenerator.ORACLE_TYPE.equals(keyGenerator.getGeneratorType())
+				&& keyGenerator.getGeneratorName() != null) {
+
+			Integer size = keyGenerator.getKeyCacheSize();
+			return (size != null && size.intValue() >= 1) ? size.intValue() : super.getPkCacheSize();
+		} else {
+			return super.getPkCacheSize();
+		}
+	}
+
+	/** Returns expected primary key sequence name for a DbEntity. */
+	protected String sequenceName(DbEntity entity) {
+
+		// use custom generator if possible
+		DbKeyGenerator keyGenerator = entity.getPrimaryKeyGenerator();
+		if (keyGenerator != null && DbKeyGenerator.ORACLE_TYPE.equals(keyGenerator.getGeneratorType())
+				&& keyGenerator.getGeneratorName() != null) {
+
+			return keyGenerator.getGeneratorName().toLowerCase();
+		} else {
+			String entName = entity.getName();
+			String seqName = _SEQUENCE_PREFIX + entName.toLowerCase();
+
+			return adapter.getQuotingStrategy().quotedIdentifier(entity, entity.getCatalog(), entity.getSchema(),
+					seqName);
+		}
+	}
+
+	protected String stripSchemaName(String sequenceName) {
+		int ind = sequenceName.indexOf('.');
+		return ind >= 0 ? sequenceName.substring(ind + 1) : sequenceName;
+	}
+
+	/**
+	 * Fetches a list of existing sequences that might match Cayenne generated
+	 * ones.
+	 */
+	protected List getExistingSequences(DataNode node) throws SQLException {
+
+		// check existing sequences
+
+		try (Connection con = node.getDataSource().getConnection();) {
+
+			try (Statement sel = con.createStatement();) {
+				String sql = "SELECT LOWER(SEQUENCE_NAME) FROM ALL_SEQUENCES";
+				adapter.getJdbcEventLogger().logQuery(sql, Collections.EMPTY_LIST);
+
+				try (ResultSet rs = sel.executeQuery(sql);) {
+					List<String> sequenceList = new ArrayList<String>();
+					while (rs.next()) {
+						sequenceList.add(rs.getString(1));
+					}
+					return sequenceList;
+				}
+			}
+		}
+	}
 }
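longPkFromDatabase() above pulls the next primary key straight from an Oracle sequence with "SELECT <sequence>.nextval FROM DUAL", and getExistingSequences() lists candidates via ALL_SEQUENCES, both now relying on try-with-resources for cleanup. A standalone sketch of the same sequence call, assuming a javax.sql.DataSource and a caller-supplied sequence name (illustrative, not Cayenne API):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import javax.sql.DataSource;

public class OracleSequenceSketch {

	// Fetches the next value of an Oracle sequence, mirroring the SQL used
	// by the generator above: SELECT <sequence>.nextval FROM DUAL.
	static long nextValue(DataSource dataSource, String sequenceName) throws SQLException {
		String sql = "SELECT " + sequenceName + ".nextval FROM DUAL";
		try (Connection con = dataSource.getConnection();
				Statement st = con.createStatement();
				ResultSet rs = st.executeQuery(sql)) {

			if (!rs.next()) {
				throw new SQLException("Sequence returned no value: " + sequenceName);
			}
			return rs.getLong(1);
		}
	}
}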

