syncope-commits mailing list archives

From ilgro...@apache.org
Subject svn commit: r1463998 - in /syncope/trunk: ./ core/src/main/java/org/apache/syncope/core/init/ core/src/main/java/org/apache/syncope/core/persistence/dao/impl/ core/src/main/java/org/apache/syncope/core/rest/controller/ core/src/main/java/org/apache/syn...
Date Wed, 03 Apr 2013 13:24:54 GMT
Author: ilgrosso
Date: Wed Apr  3 13:24:54 2013
New Revision: 1463998

URL: http://svn.apache.org/r1463998
Log:
Merge from 1_0_X + refactoring content import / export to use Spring's JdbcTemplate instead of plain java.sql classes
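
For context, the core pattern this commit adopts: Spring's JdbcTemplate acquires and releases the connection internally and translates checked SQLException into the unchecked DataAccessException hierarchy, removing the Connection/PreparedStatement/finally boilerplate visible on the removed lines below. A minimal sketch of the resulting shape (table and index names are illustrative, not taken from this commit):

    import javax.sql.DataSource;
    import org.springframework.dao.DataAccessException;
    import org.springframework.jdbc.core.JdbcTemplate;

    public class JdbcTemplateSketch {

        public void createIndex(final DataSource dataSource) {
            // No explicit Connection or PreparedStatement handling: JdbcTemplate
            // opens, uses and closes the underlying JDBC resources on its own.
            JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
            try {
                jdbcTemplate.execute("CREATE INDEX idx_example ON ExampleTable(id)");
            } catch (DataAccessException e) {
                // unchecked, translated from SQLException by Spring
            }
        }
    }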

Added:
    syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentExporter.java   (with props)
    syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentLoaderHandler.java
      - copied, changed from r1463911, syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ImportExport.java
Removed:
    syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ImportExport.java
    syncope/trunk/core/src/main/java/org/apache/syncope/core/util/SpringPersistenceUnitPostProcessor.java
Modified:
    syncope/trunk/   (props changed)
    syncope/trunk/core/src/main/java/org/apache/syncope/core/init/ContentUpgrader.java
    syncope/trunk/core/src/main/java/org/apache/syncope/core/persistence/dao/impl/AbstractContentDealer.java
    syncope/trunk/core/src/main/java/org/apache/syncope/core/persistence/dao/impl/ContentLoader.java
    syncope/trunk/core/src/main/java/org/apache/syncope/core/rest/controller/ConfigurationController.java
    syncope/trunk/core/src/main/java/org/apache/syncope/core/rest/controller/ReportController.java
    syncope/trunk/core/src/main/java/org/apache/syncope/core/services/ConfigurationServiceImpl.java
    syncope/trunk/core/src/main/java/org/apache/syncope/core/util/MappingUtil.java
    syncope/trunk/core/src/test/java/org/apache/syncope/core/init/ConnectorManagerTest.java
    syncope/trunk/core/src/test/java/org/apache/syncope/core/rest/AbstractTest.java
    syncope/trunk/core/src/test/java/org/apache/syncope/core/rest/TaskTestITCase.java

Propchange: syncope/trunk/
------------------------------------------------------------------------------
  Merged /syncope/branches/1_0_X:r1463911-1463926

Modified: syncope/trunk/core/src/main/java/org/apache/syncope/core/init/ContentUpgrader.java
URL: http://svn.apache.org/viewvc/syncope/trunk/core/src/main/java/org/apache/syncope/core/init/ContentUpgrader.java?rev=1463998&r1=1463997&r2=1463998&view=diff
==============================================================================
--- syncope/trunk/core/src/main/java/org/apache/syncope/core/init/ContentUpgrader.java (original)
+++ syncope/trunk/core/src/main/java/org/apache/syncope/core/init/ContentUpgrader.java Wed Apr  3 13:24:54 2013
@@ -25,7 +25,6 @@ import java.sql.SQLException;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import javax.sql.DataSource;
 import org.apache.commons.lang.StringUtils;
 import org.apache.syncope.common.search.NodeCond;
 import org.apache.syncope.common.to.UserTO;
@@ -64,9 +63,6 @@ import org.springframework.util.Reflecti
 public class ContentUpgrader extends AbstractContentDealer {
 
     @Autowired
-    private DataSource dataSource;
-
-    @Autowired
     private ConfDAO confDAO;
 
     @Autowired
@@ -286,8 +282,8 @@ public class ContentUpgrader extends Abs
 
         Connection conn = DataSourceUtils.getConnection(dataSource);
         try {
-            createIndexes(conn);
-            createViews(conn);
+            createIndexes();
+            createViews();
         } finally {
             DataSourceUtils.releaseConnection(conn, dataSource);
             if (conn != null) {

Modified: syncope/trunk/core/src/main/java/org/apache/syncope/core/persistence/dao/impl/AbstractContentDealer.java
URL: http://svn.apache.org/viewvc/syncope/trunk/core/src/main/java/org/apache/syncope/core/persistence/dao/impl/AbstractContentDealer.java?rev=1463998&r1=1463997&r2=1463998&view=diff
==============================================================================
--- syncope/trunk/core/src/main/java/org/apache/syncope/core/persistence/dao/impl/AbstractContentDealer.java (original)
+++ syncope/trunk/core/src/main/java/org/apache/syncope/core/persistence/dao/impl/AbstractContentDealer.java Wed Apr  3 13:24:54 2013
@@ -18,91 +18,85 @@
  */
 package org.apache.syncope.core.persistence.dao.impl;
 
-import java.io.InputStream;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
+import java.io.IOException;
 import java.util.Properties;
-import org.apache.commons.io.IOUtils;
+import javax.sql.DataSource;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.core.io.support.PropertiesLoaderUtils;
+import org.springframework.dao.DataAccessException;
+import org.springframework.jdbc.core.JdbcTemplate;
 
 public abstract class AbstractContentDealer {
 
     protected static final Logger LOG = LoggerFactory.getLogger(AbstractContentDealer.class);
 
-    private static final String VIEWS_FILE = "/views.xml";
+    protected static final String ROOT_ELEMENT = "dataset";
 
-    private static final String INDEXES_FILE = "/indexes.xml";
+    private static final String PERSISTENCE_PROPERTIES = "/persistence.properties";
 
-    protected void createIndexes(final Connection conn) {
-        LOG.debug("Creating indexes");
+    private static final String VIEWS_XML = "/views.xml";
+
+    private static final String INDEXES_XML = "/indexes.xml";
+
+    protected static String dbSchema;
+
+    protected static Properties views;
+
+    protected static Properties indexes;
 
-        InputStream indexesStream = null;
-        Properties indexes = new Properties();
+    @Autowired
+    protected DataSource dataSource;
+
+    static {
         try {
-            indexesStream = getClass().getResourceAsStream(INDEXES_FILE);
-            indexes.loadFromXML(indexesStream);
-        } catch (Exception e) {
-            throw new RuntimeException("Error loading properties from stream", e);
-        } finally {
-            IOUtils.closeQuietly(indexesStream);
+            Properties persistence = PropertiesLoaderUtils.loadProperties(
+                    new ClassPathResource(PERSISTENCE_PROPERTIES));
+            dbSchema = persistence.getProperty("database.schema");
+
+            views = PropertiesLoaderUtils.loadProperties(new ClassPathResource(VIEWS_XML));
+
+            indexes = PropertiesLoaderUtils.loadProperties(new ClassPathResource(INDEXES_XML));
+        } catch (IOException e) {
+            LOG.error("Could not read one or more properties files", e);
         }
+    }
+
+    protected void createIndexes() {
+        LOG.debug("Creating indexes");
+
+        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
 
         for (String idx : indexes.stringPropertyNames()) {
             LOG.debug("Creating index {}", indexes.get(idx).toString());
-            PreparedStatement statement = null;
+
             try {
-                final String updateIndexed = indexes.get(idx).toString();
-                statement = conn.prepareStatement(updateIndexed);
-                statement.executeUpdate();
-            } catch (SQLException e) {
+                jdbcTemplate.execute(indexes.get(idx).toString());
+            } catch (DataAccessException e) {
                 LOG.error("Could not create index ", e);
-            } finally {
-                closeStatement(statement);
             }
         }
+
+        LOG.debug("Indexes created");
     }
 
-    protected void createViews(final Connection conn) {
+    protected void createViews() {
         LOG.debug("Creating views");
-        InputStream viewsStream = null;
-        try {
-            viewsStream = getClass().getResourceAsStream(VIEWS_FILE);
-            Properties views = new Properties();
-            views.loadFromXML(viewsStream);
-
-            for (String idx : views.stringPropertyNames()) {
-                LOG.debug("Creating view {}", views.get(idx).toString());
-                PreparedStatement statement = null;
-                try {
-                    final String updateViews = views.get(idx).toString().replaceAll("\\n", " ");
-                    statement = conn.prepareStatement(updateViews);
-                    statement.executeUpdate();
-                } catch (SQLException e) {
-                    LOG.error("Could not create view ", e);
-                } finally {
-                    if (statement != null) {
-                        statement.close();
-                    }
-                }
-            }
 
-            LOG.debug("Views created, go for indexes");
-        } catch (Exception e) {
-            LOG.error("While creating views", e);
-        } finally {
-            IOUtils.closeQuietly(viewsStream);
-        }
-    }
+        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
+
+        for (String idx : views.stringPropertyNames()) {
+            LOG.debug("Creating view {}", views.get(idx).toString());
 
-    protected void closeStatement(final PreparedStatement statement) {
-        if (statement != null) {
             try {
-                statement.close();
-            } catch (SQLException e) {
-                LOG.error("Error closing SQL statement", e);
+                jdbcTemplate.execute(views.get(idx).toString().replaceAll("\\n", " "));
+            } catch (DataAccessException e) {
+                LOG.error("Could not create view ", e);
             }
         }
+
+        LOG.debug("Ciews created");
     }
 }
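
A side note on the loading above: PropertiesLoaderUtils.loadProperties() recognizes the .xml suffix of views.xml and indexes.xml and falls back to Properties.loadFromXML(), so both files keep the standard java.util.Properties XML shape. A hedged sketch of that round trip, with an invented entry key and statement:

    import java.util.Properties;
    import org.springframework.core.io.ClassPathResource;
    import org.springframework.core.io.support.PropertiesLoaderUtils;

    public class XmlPropertiesSketch {

        public static void main(final String[] args) throws Exception {
            // indexes.xml is expected to look like:
            //   <?xml version="1.0" encoding="UTF-8"?>
            //   <!DOCTYPE properties SYSTEM "http://java.sun.com/dtd/properties.dtd">
            //   <properties>
            //     <entry key="idx_example">CREATE INDEX idx_example ON ExampleTable(id)</entry>
            //   </properties>
            Properties indexes = PropertiesLoaderUtils.loadProperties(new ClassPathResource("indexes.xml"));
            for (String key : indexes.stringPropertyNames()) {
                System.out.println(key + " -> " + indexes.getProperty(key));
            }
        }
    }
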

Modified: syncope/trunk/core/src/main/java/org/apache/syncope/core/persistence/dao/impl/ContentLoader.java
URL: http://svn.apache.org/viewvc/syncope/trunk/core/src/main/java/org/apache/syncope/core/persistence/dao/impl/ContentLoader.java?rev=1463998&r1=1463997&r2=1463998&view=diff
==============================================================================
--- syncope/trunk/core/src/main/java/org/apache/syncope/core/persistence/dao/impl/ContentLoader.java (original)
+++ syncope/trunk/core/src/main/java/org/apache/syncope/core/persistence/dao/impl/ContentLoader.java Wed Apr  3 13:24:54 2013
@@ -18,19 +18,15 @@
  */
 package org.apache.syncope.core.persistence.dao.impl;
 
+import static org.apache.syncope.core.persistence.dao.impl.AbstractContentDealer.LOG;
 import java.io.InputStream;
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import javax.sql.DataSource;
 import javax.xml.parsers.SAXParser;
 import javax.xml.parsers.SAXParserFactory;
 import org.apache.commons.io.IOUtils;
 import org.apache.syncope.core.persistence.beans.SyncopeConf;
-import org.apache.syncope.core.util.ImportExport;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.jdbc.datasource.DataSourceUtils;
+import org.apache.syncope.core.util.ContentLoaderHandler;
+import org.springframework.dao.DataAccessException;
+import org.springframework.jdbc.core.JdbcTemplate;
 import org.springframework.stereotype.Component;
 import org.springframework.transaction.annotation.Transactional;
 
@@ -40,65 +36,29 @@ import org.springframework.transaction.a
 @Component
 public class ContentLoader extends AbstractContentDealer {
 
-    @Autowired
-    private DataSource dataSource;
-
-    @Autowired
-    private ImportExport importExport;
+    public static final String CONTENT_XML = "content.xml";
 
     @Transactional
     public void load() {
-        Connection conn = null;
-        try {
-            conn = DataSourceUtils.getConnection(dataSource);
-
-            boolean existingData = isDataPresent(conn);
-            if (existingData) {
-                LOG.info("Data found in the database, leaving untouched");
-            } else {
-                LOG.info("Empty database found, loading default content");
-
-                loadDefaultContent();
-                createIndexes(conn);
-                createViews(conn);
-            }
-        } finally {
-            DataSourceUtils.releaseConnection(conn, dataSource);
-            if (conn != null) {
-                try {
-                    if (!conn.isClosed()) {
-                        conn.close();
-                    }
-                } catch (SQLException e) {
-                    LOG.error("While releasing connection", e);
-                }
-            }
-        }
-    }
+        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
 
-    private boolean isDataPresent(final Connection conn) {
-        PreparedStatement statement = null;
-        ResultSet rs = null;
+        boolean existingData;
         try {
-            final String queryContent = "SELECT * FROM " + SyncopeConf.class.getSimpleName();
-            statement = conn.prepareStatement(
-                    queryContent, ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY);
-            rs = statement.executeQuery();
-            rs.last();
-            return rs.getRow() > 0;
-        } catch (SQLException e) {
+            existingData = jdbcTemplate.queryForObject("SELECT COUNT(0) FROM " + SyncopeConf.class.getSimpleName(),
+                    Integer.class) > 0;
+        } catch (DataAccessException e) {
             LOG.error("Could not access to table " + SyncopeConf.class.getSimpleName(), e);
-            return true;
-        } finally {
-            if (rs != null) {
-                try {
-                    rs.close();
-                } catch (SQLException e) {
-                    LOG.error("While closing tables result set", e);
-                }
-            }
+            existingData = true;
+        }
 
-            closeStatement(statement);
+        if (existingData) {
+            LOG.info("Data found in the database, leaving untouched");
+        } else {
+            LOG.info("Empty database found, loading default content");
+
+            loadDefaultContent();
+            createIndexes();
+            createViews();
         }
     }
 
@@ -106,10 +66,10 @@ public class ContentLoader extends Abstr
         SAXParserFactory factory = SAXParserFactory.newInstance();
         InputStream in = null;
         try {
-            in = getClass().getResourceAsStream("/" + ImportExport.CONTENT_FILE);
+            in = getClass().getResourceAsStream("/" + CONTENT_XML);
 
             SAXParser parser = factory.newSAXParser();
-            parser.parse(in, importExport);
+            parser.parse(in, new ContentLoaderHandler(dataSource, ROOT_ELEMENT));
             LOG.debug("Default content successfully loaded");
         } catch (Exception e) {
             LOG.error("While loading default content", e);

Modified: syncope/trunk/core/src/main/java/org/apache/syncope/core/rest/controller/ConfigurationController.java
URL: http://svn.apache.org/viewvc/syncope/trunk/core/src/main/java/org/apache/syncope/core/rest/controller/ConfigurationController.java?rev=1463998&r1=1463997&r2=1463998&view=diff
==============================================================================
--- syncope/trunk/core/src/main/java/org/apache/syncope/core/rest/controller/ConfigurationController.java (original)
+++ syncope/trunk/core/src/main/java/org/apache/syncope/core/rest/controller/ConfigurationController.java Wed Apr  3 13:24:54 2013
@@ -38,9 +38,10 @@ import org.apache.syncope.core.init.Work
 import org.apache.syncope.core.persistence.beans.SyncopeConf;
 import org.apache.syncope.core.persistence.dao.ConfDAO;
 import org.apache.syncope.core.persistence.dao.MissingConfKeyException;
+import org.apache.syncope.core.persistence.dao.impl.ContentLoader;
 import org.apache.syncope.core.persistence.validation.attrvalue.Validator;
 import org.apache.syncope.core.rest.data.ConfigurationDataBinder;
-import org.apache.syncope.core.util.ImportExport;
+import org.apache.syncope.core.util.ContentExporter;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.core.io.Resource;
 import org.springframework.core.io.support.ResourcePatternResolver;
@@ -67,7 +68,7 @@ public class ConfigurationController ext
     private ConfigurationDataBinder binder;
 
     @Autowired
-    private ImportExport importExport;
+    private ContentExporter exporter;
 
     @Autowired
     private ImplementationClassNamesLoader classNamesLoader;
@@ -205,7 +206,7 @@ public class ConfigurationController ext
     public void dbExport(final HttpServletResponse response) {
         response.setContentType(MediaType.TEXT_XML);
         response.setHeader(SyncopeConstants.CONTENT_DISPOSITION_HEADER,
-                "attachment; filename=" + ImportExport.CONTENT_FILE);
+                "attachment; filename=" + ContentLoader.CONTENT_XML);
         try {
             dbExportInternal(response.getOutputStream());
         } catch (IOException e) {
@@ -217,7 +218,7 @@ public class ConfigurationController ext
     @Transactional(readOnly = true)
     public void dbExportInternal(final OutputStream os) {
         try {
-            importExport.export(os, wfAdapterLoader.getTablePrefix());
+            exporter.export(os, wfAdapterLoader.getTablePrefix());
 
             auditManager.audit(Category.configuration, ConfigurationSubCategory.dbExport, Result.success,
                     "Successfully exported database content");

Modified: syncope/trunk/core/src/main/java/org/apache/syncope/core/rest/controller/ReportController.java
URL: http://svn.apache.org/viewvc/syncope/trunk/core/src/main/java/org/apache/syncope/core/rest/controller/ReportController.java?rev=1463998&r1=1463997&r2=1463998&view=diff
==============================================================================
--- syncope/trunk/core/src/main/java/org/apache/syncope/core/rest/controller/ReportController.java (original)
+++ syncope/trunk/core/src/main/java/org/apache/syncope/core/rest/controller/ReportController.java Wed Apr  3 13:24:54 2013
@@ -262,7 +262,7 @@ public class ReportController extends Ab
         try {
             os = response.getOutputStream();
         } catch (IOException e) {
-            throw new RuntimeException("Could not retrieve stream", e);
+            throw new IllegalStateException("Could not get output stream", e);
         }
         ReportExec reportExec = getAndCheckReportExecInternal(executionId);
 

Modified: syncope/trunk/core/src/main/java/org/apache/syncope/core/services/ConfigurationServiceImpl.java
URL: http://svn.apache.org/viewvc/syncope/trunk/core/src/main/java/org/apache/syncope/core/services/ConfigurationServiceImpl.java?rev=1463998&r1=1463997&r2=1463998&view=diff
==============================================================================
--- syncope/trunk/core/src/main/java/org/apache/syncope/core/services/ConfigurationServiceImpl.java (original)
+++ syncope/trunk/core/src/main/java/org/apache/syncope/core/services/ConfigurationServiceImpl.java Wed Apr  3 13:24:54 2013
@@ -35,8 +35,8 @@ import org.apache.syncope.common.to.Conf
 import org.apache.syncope.common.to.MailTemplateTO;
 import org.apache.syncope.common.to.ValidatorTO;
 import org.apache.syncope.common.util.CollectionWrapper;
+import org.apache.syncope.core.persistence.dao.impl.ContentLoader;
 import org.apache.syncope.core.rest.controller.ConfigurationController;
-import org.apache.syncope.core.util.ImportExport;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
 
@@ -69,7 +69,7 @@ public class ConfigurationServiceImpl im
         return Response.ok(sout)
                 .type(MediaType.TEXT_XML)
                 .header(SyncopeConstants.CONTENT_DISPOSITION_HEADER,
-                "attachment; filename=" + ImportExport.CONTENT_FILE)
+                "attachment; filename=" + ContentLoader.CONTENT_XML)
                 .build();
     }
 

Added: syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentExporter.java
URL: http://svn.apache.org/viewvc/syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentExporter.java?rev=1463998&view=auto
==============================================================================
--- syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentExporter.java (added)
+++ syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentExporter.java Wed Apr  3 13:24:54 2013
@@ -0,0 +1,376 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.syncope.core.util;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.sql.Blob;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import javax.xml.transform.OutputKeys;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerConfigurationException;
+import javax.xml.transform.sax.SAXTransformerFactory;
+import javax.xml.transform.sax.TransformerHandler;
+import javax.xml.transform.stream.StreamResult;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.syncope.common.SyncopeConstants;
+import org.apache.syncope.core.persistence.dao.impl.AbstractContentDealer;
+import org.apache.syncope.core.util.multiparent.MultiParentNode;
+import org.apache.syncope.core.util.multiparent.MultiParentNodeOp;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.jdbc.datasource.DataSourceUtils;
+import org.springframework.security.crypto.codec.Hex;
+import org.springframework.stereotype.Component;
+import org.xml.sax.SAXException;
+import org.xml.sax.helpers.AttributesImpl;
+
+/**
+ * Export internal storage content as XML.
+ */
+@Component
+public class ContentExporter extends AbstractContentDealer {
+
+    protected final static Set<String> TABLE_PREFIXES_TO_BE_EXCLUDED =
+            new HashSet<String>(Arrays.asList(new String[] {"QRTZ_", "LOGGING", "REPORTEXEC", "TASKEXEC",
+        "SYNCOPEUSER", "UATTR", "UATTRVALUE", "UATTRUNIQUEVALUE", "UDERATTR", "UVIRATTR",
+        "MEMBERSHIP", "MATTR", "MATTRVALUE", "MATTRUNIQUEVALUE", "MDERATTR", "MVIRATTR", "USERREQUEST"}));
+
+    protected static final Map<String, String> TABLES_TO_BE_FILTERED =
+            Collections.singletonMap("TASK", "DTYPE <> 'PropagationTask'");
+
+    protected static final Map<String, Set<String>> COLUMNS_TO_BE_NULLIFIED =
+            Collections.singletonMap("SYNCOPEROLE", Collections.singleton("USEROWNER_ID"));
+
+    private boolean isTableAllowed(final String tableName) {
+        boolean allowed = true;
+        for (String prefix : TABLE_PREFIXES_TO_BE_EXCLUDED) {
+            if (tableName.toUpperCase().startsWith(prefix)) {
+                allowed = false;
+            }
+        }
+        return allowed;
+    }
+
+    private List<String> sortByForeignKeys(final Connection conn, final Set<String> tableNames)
+            throws SQLException {
+
+        Set<MultiParentNode<String>> roots = new HashSet<MultiParentNode<String>>();
+
+        final DatabaseMetaData meta = conn.getMetaData();
+
+        final Map<String, MultiParentNode<String>> exploited =
+                new TreeMap<String, MultiParentNode<String>>(String.CASE_INSENSITIVE_ORDER);
+
+        final Set<String> pkTableNames = new HashSet<String>();
+
+        for (String tableName : tableNames) {
+            MultiParentNode<String> node = exploited.get(tableName);
+            if (node == null) {
+                node = new MultiParentNode<String>(tableName);
+                roots.add(node);
+                exploited.put(tableName, node);
+            }
+
+            pkTableNames.clear();
+
+            ResultSet rs = null;
+            try {
+                rs = meta.getImportedKeys(conn.getCatalog(), dbSchema, tableName);
+
+                // this is to avoid repetition
+                while (rs.next()) {
+                    pkTableNames.add(rs.getString("PKTABLE_NAME"));
+                }
+            } finally {
+                if (rs != null) {
+                    try {
+                        rs.close();
+                    } catch (SQLException e) {
+                        LOG.error("While closing tables result set", e);
+                    }
+                }
+            }
+
+            for (String pkTableName : pkTableNames) {
+                if (!tableName.equalsIgnoreCase(pkTableName)) {
+                    MultiParentNode<String> pkNode = exploited.get(pkTableName);
+                    if (pkNode == null) {
+                        pkNode = new MultiParentNode<String>(pkTableName);
+                        roots.add(pkNode);
+                        exploited.put(pkTableName, pkNode);
+                    }
+
+                    pkNode.addChild(node);
+
+                    if (roots.contains(node)) {
+                        roots.remove(node);
+                    }
+                }
+            }
+        }
+
+        final List<String> sortedTableNames = new ArrayList<String>(tableNames.size());
+        MultiParentNodeOp.traverseTree(roots, sortedTableNames);
+
+            // remove from sortedTableNames any table possibly added during lookup
+            // but matching some item in TABLE_PREFIXES_TO_BE_EXCLUDED
+        sortedTableNames.retainAll(tableNames);
+
+        LOG.debug("Tables after retainAll {}", sortedTableNames);
+
+        Collections.reverse(sortedTableNames);
+
+        return sortedTableNames;
+    }
+
+    private String getValues(final ResultSet rs, final String columnName, final Integer columnType)
+            throws SQLException {
+
+        String res = null;
+
+        try {
+            switch (columnType) {
+                case Types.BINARY:
+                case Types.VARBINARY:
+                case Types.LONGVARBINARY:
+                    final InputStream is = rs.getBinaryStream(columnName);
+                    if (is != null) {
+                        res = new String(Hex.encode(IOUtils.toByteArray(is)));
+                    }
+                    break;
+
+                case Types.BLOB:
+                    final Blob blob = rs.getBlob(columnName);
+                    if (blob != null) {
+                        res = new String(Hex.encode(IOUtils.toByteArray(blob.getBinaryStream())));
+                    }
+                    break;
+
+                case Types.BIT:
+                case Types.BOOLEAN:
+                    if (rs.getBoolean(columnName)) {
+                        res = "1";
+                    } else {
+                        res = "0";
+                    }
+                    break;
+
+                case Types.DATE:
+                case Types.TIME:
+                case Types.TIMESTAMP:
+                    final Timestamp timestamp = rs.getTimestamp(columnName);
+                    if (timestamp != null) {
+                        res = DataFormat.format(new Date(timestamp.getTime()));
+                    }
+                    break;
+
+                default:
+                    res = rs.getString(columnName);
+            }
+        } catch (IOException e) {
+            LOG.error("Error retrieving hexadecimal string", e);
+        }
+
+        return res;
+    }
+
+    private void doExportTable(final TransformerHandler handler, final Connection conn, final String tableName,
+            final String whereClause) throws SQLException, SAXException {
+
+        LOG.debug("Export table {}", tableName);
+
+        AttributesImpl attrs = new AttributesImpl();
+
+        PreparedStatement stmt = null;
+        ResultSet rs = null;
+        ResultSet pkeyRS = null;
+        try {
+            // ------------------------------------
+            // retrieve primary keys to perform an ordered select
+
+            final DatabaseMetaData meta = conn.getMetaData();
+            pkeyRS = meta.getPrimaryKeys(null, null, tableName);
+
+            final StringBuilder orderBy = new StringBuilder();
+
+            while (pkeyRS.next()) {
+                final String columnName = pkeyRS.getString("COLUMN_NAME");
+                if (columnName != null) {
+                    if (orderBy.length() > 0) {
+                        orderBy.append(",");
+                    }
+
+                    orderBy.append(columnName);
+                }
+            }
+
+            // ------------------------------------
+            StringBuilder query = new StringBuilder();
+            query.append("SELECT * FROM ").append(tableName).append(" a");
+            if (StringUtils.isNotBlank(whereClause)) {
+                query.append(" WHERE ").append(whereClause);
+            }
+            if (orderBy.length() > 0) {
+                query.append(" ORDER BY ").append(orderBy);
+            }
+            stmt = conn.prepareStatement(query.toString());
+
+            rs = stmt.executeQuery();
+            while (rs.next()) {
+                attrs.clear();
+
+                final ResultSetMetaData rsMeta = rs.getMetaData();
+                for (int i = 0; i < rsMeta.getColumnCount(); i++) {
+                    final String columnName = rsMeta.getColumnName(i + 1);
+                    final Integer columnType = rsMeta.getColumnType(i + 1);
+
+                    // Retrieve value taking care of binary values.
+                    String value = getValues(rs, columnName, columnType);
+                    if (value != null && (!COLUMNS_TO_BE_NULLIFIED.containsKey(tableName)
+                            || !COLUMNS_TO_BE_NULLIFIED.get(tableName).contains(columnName))) {
+
+                        attrs.addAttribute("", "", columnName, "CDATA", value);
+                    }
+                }
+
+                handler.startElement("", "", tableName, attrs);
+                handler.endElement("", "", tableName);
+
+                LOG.debug("Add record {}", attrs);
+            }
+        } finally {
+            if (rs != null) {
+                try {
+                    rs.close();
+                } catch (SQLException e) {
+                    LOG.error("While closing result set", e);
+                }
+            }
+            if (pkeyRS != null) {
+                try {
+                    pkeyRS.close();
+                } catch (SQLException e) {
+                    LOG.error("While closing result set", e);
+                }
+            }
+            if (stmt != null) {
+                try {
+                    stmt.close();
+                } catch (SQLException e) {
+                    LOG.error("While closing result set", e);
+                }
+            }
+        }
+    }
+
+    public void export(final OutputStream os, final String wfTablePrefix)
+            throws SAXException, TransformerConfigurationException {
+
+        if (StringUtils.isNotBlank(wfTablePrefix)) {
+            TABLE_PREFIXES_TO_BE_EXCLUDED.add(wfTablePrefix);
+        }
+
+        StreamResult streamResult = new StreamResult(os);
+        final SAXTransformerFactory transformerFactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
+
+        TransformerHandler handler = transformerFactory.newTransformerHandler();
+        Transformer serializer = handler.getTransformer();
+        serializer.setOutputProperty(OutputKeys.ENCODING, SyncopeConstants.DEFAULT_ENCODING);
+        serializer.setOutputProperty(OutputKeys.INDENT, "yes");
+        handler.setResult(streamResult);
+        handler.startDocument();
+        handler.startElement("", "", ROOT_ELEMENT, new AttributesImpl());
+
+        Connection conn = null;
+        ResultSet rs = null;
+        try {
+            conn = DataSourceUtils.getConnection(dataSource);
+            final DatabaseMetaData meta = conn.getMetaData();
+
+            final String schema = dbSchema;
+
+            rs = meta.getTables(null, schema, null, new String[] {"TABLE"});
+
+            final Set<String> tableNames = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
+
+            while (rs.next()) {
+                String tableName = rs.getString("TABLE_NAME");
+                LOG.debug("Found table {}", tableName);
+                if (isTableAllowed(tableName)) {
+                    tableNames.add(tableName);
+                }
+            }
+
+            LOG.debug("Tables to be exported {}", tableNames);
+
+            // then sort tables based on foreign keys and dump
+            for (String tableName : sortByForeignKeys(conn, tableNames)) {
+                try {
+                    doExportTable(handler, conn, tableName, TABLES_TO_BE_FILTERED.get(tableName.toUpperCase()));
+                } catch (Exception e) {
+                    LOG.error("Failure exporting table {}", tableName, e);
+                }
+            }
+        } catch (SQLException e) {
+            LOG.error("While exporting database content", e);
+        } finally {
+            if (rs != null) {
+                try {
+                    rs.close();
+                } catch (SQLException e) {
+                    LOG.error("While closing tables result set", e);
+                }
+            }
+
+            DataSourceUtils.releaseConnection(conn, dataSource);
+            if (conn != null) {
+                try {
+                    if (!conn.isClosed()) {
+                        conn.close();
+                    }
+                } catch (SQLException e) {
+                    LOG.error("While releasing connection", e);
+                }
+            }
+        }
+
+        handler.endElement("", "", ROOT_ELEMENT);
+        handler.endDocument();
+    }
+}
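
For completeness, a hedged sketch of driving the new exporter from calling code; ConfigurationController wires the stream to the HTTP response instead, and the file name here is arbitrary:

    import java.io.FileOutputStream;
    import java.io.OutputStream;
    import org.apache.syncope.core.util.ContentExporter;
    import org.springframework.beans.factory.annotation.Autowired;

    public class ExportSketch {

        @Autowired
        private ContentExporter exporter;

        public void dumpToFile() throws Exception {
            // A blank/null workflow table prefix means no exclusions beyond
            // TABLE_PREFIXES_TO_BE_EXCLUDED.
            OutputStream os = new FileOutputStream("exported-content.xml");
            try {
                exporter.export(os, null);
            } finally {
                os.close();
            }
        }
    }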

Propchange: syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentExporter.java
------------------------------------------------------------------------------
    svn:eol-style = native

Propchange: syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentExporter.java
------------------------------------------------------------------------------
    svn:keywords = Date Author Id Revision HeadURL

Propchange: syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentExporter.java
------------------------------------------------------------------------------
    svn:mime-type = text/plain

Copied: syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentLoaderHandler.java (from r1463911, syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ImportExport.java)
URL: http://svn.apache.org/viewvc/syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentLoaderHandler.java?p2=syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentLoaderHandler.java&p1=syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ImportExport.java&r1=1463911&r2=1463998&rev=1463998&view=diff
==============================================================================
--- syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ImportExport.java (original)
+++ syncope/trunk/core/src/main/java/org/apache/syncope/core/util/ContentLoaderHandler.java Wed Apr  3 13:24:54 2013
@@ -18,151 +18,57 @@
  */
 package org.apache.syncope.core.util;
 
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.sql.Blob;
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
-import java.sql.PreparedStatement;
 import java.sql.ResultSet;
-import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
-import java.sql.Timestamp;
 import java.sql.Types;
 import java.text.ParseException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Date;
 import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.TreeSet;
-import javax.persistence.EntityManager;
-import javax.persistence.Query;
-import javax.persistence.TemporalType;
 import javax.sql.DataSource;
-import javax.xml.transform.OutputKeys;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerConfigurationException;
-import javax.xml.transform.sax.SAXTransformerFactory;
-import javax.xml.transform.sax.TransformerHandler;
-import javax.xml.transform.stream.StreamResult;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.syncope.common.SyncopeConstants;
-import org.apache.syncope.core.util.multiparent.MultiParentNode;
-import org.apache.syncope.core.util.multiparent.MultiParentNodeOp;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.jdbc.datasource.DataSourceUtils;
+import org.springframework.dao.DataAccessException;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.core.ResultSetExtractor;
 import org.springframework.security.crypto.codec.Hex;
-import org.springframework.stereotype.Component;
 import org.xml.sax.Attributes;
 import org.xml.sax.SAXException;
-import org.xml.sax.helpers.AttributesImpl;
 import org.xml.sax.helpers.DefaultHandler;
 
-@Component
-public class ImportExport extends DefaultHandler {
-
-    private static final Logger LOG = LoggerFactory.getLogger(ImportExport.class);
-
-    public static final String CONTENT_FILE = "content.xml";
-
-    private static final String ROOT_ELEMENT = "dataset";
-
-    @Autowired
-    private EntityManager entityManager;
-
-    @Autowired
-    private DataSource dataSource;
-
-    private final static Set<String> TABLE_PREFIXES_TO_BE_EXCLUDED =
-            new HashSet<String>(Arrays.asList(new String[]{"QRTZ_", "LOGGING", "REPORTEXEC", "TASKEXEC",
-        "SYNCOPEUSER", "UATTR", "UATTRVALUE", "UATTRUNIQUEVALUE", "UDERATTR", "UVIRATTR",
-        "MEMBERSHIP", "MATTR", "MATTRVALUE", "MATTRUNIQUEVALUE", "MDERATTR", "MVIRATTR", "USERREQUEST"}));
-
-    private final static Map<String, String> TABLES_TO_BE_FILTERED =
-            Collections.singletonMap("TASK", "DTYPE <> 'PropagationTask'");
+/**
+ * SAX handler for generating SQL INSERT statements from a given XML file.
+ */
+public class ContentLoaderHandler extends DefaultHandler {
 
-    private final static Map<String, Set<String>> COLUMNS_TO_BE_NULLIFIED =
-            Collections.singletonMap("SYNCOPEROLE", Collections.singleton("USEROWNER_ID"));
+    private static final Logger LOG = LoggerFactory.getLogger(ContentLoaderHandler.class);
 
-    private String readSchema() {
-        String schema = null;
+    private final DataSource dataSource;
 
-        InputStream dbPropsStream = null;
-        try {
-            dbPropsStream = getClass().getResourceAsStream("/persistence.properties");
-            Properties dbProps = new Properties();
-            dbProps.load(dbPropsStream);
-            schema = dbProps.getProperty("database.schema");
-        } catch (Exception e) {
-            if (LOG.isDebugEnabled()) {
-                LOG.debug("Could not find persistence.properties", e);
-            } else {
-                LOG.error("Could not find persistence.properties");
-            }
-        } finally {
-            IOUtils.closeQuietly(dbPropsStream);
-        }
+    private final String rootElement;
 
-        return schema;
+    public ContentLoaderHandler(final DataSource dataSource, final String rootElement) {
+        this.dataSource = dataSource;
+        this.rootElement = rootElement;
     }
 
-    private void setParameters(final String tableName, final Attributes attrs, final Query query) {
-        Map<String, Integer> colTypes = new HashMap<String, Integer>();
+    private Object[] getParameters(final String tableName, final Attributes attrs) {
+        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
 
-        Connection conn = null;
-        ResultSet rs = null;
-        PreparedStatement stmt = null;
-        try {
-            conn = DataSourceUtils.getConnection(dataSource);
+        Map<String, Integer> colTypes = jdbcTemplate.query("SELECT * FROM " + tableName,
+                new ResultSetExtractor<Map<String, Integer>>() {
 
-            final String queryString = "SELECT * FROM " + tableName;
-            stmt = conn.prepareStatement(queryString);
-            rs = stmt.executeQuery();
-            for (int i = 0; i < rs.getMetaData().getColumnCount(); i++) {
-                colTypes.put(rs.getMetaData().getColumnName(i + 1).toUpperCase(),
-                        rs.getMetaData().getColumnType(i + 1));
-            }
-        } catch (SQLException e) {
-            LOG.error("While setting column types", e);
-        } finally {
-            if (stmt != null) {
-                try {
-                    stmt.close();
-                } catch (SQLException e) {
-                    LOG.error("While closing statement", e);
-                }
-            }
-            if (rs != null) {
-                try {
-                    rs.close();
-                } catch (SQLException e) {
-                    LOG.error("While closing result set", e);
+            @Override
+            public Map<String, Integer> extractData(final ResultSet rs) throws SQLException, DataAccessException {
+                Map<String, Integer> colTypes = new HashMap<String, Integer>();
+                for (int i = 1; i <= rs.getMetaData().getColumnCount(); i++) {
+                    colTypes.put(rs.getMetaData().getColumnName(i).toUpperCase(),
+                            rs.getMetaData().getColumnType(i));
                 }
+                return colTypes;
             }
+        });
 
-            DataSourceUtils.releaseConnection(conn, dataSource);
-            if (conn != null) {
-                try {
-                    if (!conn.isClosed()) {
-                        conn.close();
-                    }
-                } catch (SQLException e) {
-                    LOG.error("While releasing connection", e);
-                }
-            }
-        }
-
+        Object[] parameters = new Object[attrs.getLength()];
         for (int i = 0; i < attrs.getLength(); i++) {
             Integer colType = colTypes.get(attrs.getQName(i).toUpperCase());
             if (colType == null) {
@@ -175,10 +81,10 @@ public class ImportExport extends Defaul
                 case Types.TINYINT:
                 case Types.SMALLINT:
                     try {
-                        query.setParameter(i + 1, Integer.valueOf(attrs.getValue(i)));
+                        parameters[i] = Integer.valueOf(attrs.getValue(i));
                     } catch (NumberFormatException e) {
                         LOG.error("Unparsable Integer '{}'", attrs.getValue(i));
-                        query.setParameter(i + 1, attrs.getValue(i));
+                        parameters[i] = attrs.getValue(i);
                     }
                     break;
 
@@ -186,29 +92,29 @@ public class ImportExport extends Defaul
                 case Types.DECIMAL:
                 case Types.BIGINT:
                     try {
-                        query.setParameter(i + 1, Long.valueOf(attrs.getValue(i)));
+                        parameters[i] = Long.valueOf(attrs.getValue(i));
                     } catch (NumberFormatException e) {
                         LOG.error("Unparsable Long '{}'", attrs.getValue(i));
-                        query.setParameter(i + 1, attrs.getValue(i));
+                        parameters[i] = attrs.getValue(i);
                     }
                     break;
 
                 case Types.DOUBLE:
                     try {
-                        query.setParameter(i + 1, Double.valueOf(attrs.getValue(i)));
+                        parameters[i] = Double.valueOf(attrs.getValue(i));
                     } catch (NumberFormatException e) {
                         LOG.error("Unparsable Double '{}'", attrs.getValue(i));
-                        query.setParameter(i + 1, attrs.getValue(i));
+                        parameters[i] = attrs.getValue(i);
                     }
                     break;
 
                 case Types.REAL:
                 case Types.FLOAT:
                     try {
-                        query.setParameter(i + 1, Float.valueOf(attrs.getValue(i)));
+                        parameters[i] = Float.valueOf(attrs.getValue(i));
                     } catch (NumberFormatException e) {
                         LOG.error("Unparsable Float '{}'", attrs.getValue(i));
-                        query.setParameter(i + 1, attrs.getValue(i));
+                        parameters[i] = attrs.getValue(i);
                     }
                     break;
 
@@ -216,43 +122,45 @@ public class ImportExport extends Defaul
                 case Types.TIME:
                 case Types.TIMESTAMP:
                     try {
-                        query.setParameter(i + 1, DataFormat.parseDate(attrs.getValue(i)), TemporalType.TIMESTAMP);
+                        parameters[i] = DataFormat.parseDate(attrs.getValue(i));
                     } catch (ParseException e) {
                         LOG.error("Unparsable Date '{}'", attrs.getValue(i));
-                        query.setParameter(i + 1, attrs.getValue(i));
+                        parameters[i] = attrs.getValue(i);
                     }
                     break;
 
                 case Types.BIT:
                 case Types.BOOLEAN:
-                    query.setParameter(i + 1, "1".equals(attrs.getValue(i)) ? Boolean.TRUE : Boolean.FALSE);
+                    parameters[i] = "1".equals(attrs.getValue(i)) ? Boolean.TRUE : Boolean.FALSE;
                     break;
 
                 case Types.BINARY:
                 case Types.VARBINARY:
                 case Types.LONGVARBINARY:
                     try {
-                        query.setParameter(i + 1, Hex.decode(attrs.getValue(i)));
+                        parameters[i] = Hex.decode(attrs.getValue(i));
                     } catch (IllegalArgumentException e) {
-                        query.setParameter(i + 1, attrs.getValue(i));
+                        parameters[i] = attrs.getValue(i);
                     }
                     break;
 
                 case Types.BLOB:
                     try {
-                        query.setParameter(i + 1, Hex.decode(attrs.getValue(i)));
+                        parameters[i] = Hex.decode(attrs.getValue(i));
                     } catch (IllegalArgumentException e) {
                         LOG.warn("Error decoding hex string to specify a blob parameter", e);
-                        query.setParameter(i + 1, attrs.getValue(i));
+                        parameters[i] = attrs.getValue(i);
                     } catch (Exception e) {
                         LOG.warn("Error creating a new blob parameter", e);
                     }
                     break;
 
                 default:
-                    query.setParameter(i + 1, attrs.getValue(i));
+                    parameters[i] = attrs.getValue(i);
             }
         }
+
+        return parameters;
     }
 
     @Override
@@ -260,324 +168,30 @@ public class ImportExport extends Defaul
             throws SAXException {
 
         // skip root element
-        if (ROOT_ELEMENT.equals(qName)) {
+        if (rootElement.equals(qName)) {
             return;
         }
 
-        StringBuilder queryString = new StringBuilder("INSERT INTO ").append(qName).append('(');
+        StringBuilder query = new StringBuilder("INSERT INTO ").append(qName).append('(');
 
         StringBuilder values = new StringBuilder();
 
         for (int i = 0; i < atts.getLength(); i++) {
-            queryString.append(atts.getQName(i));
+            query.append(atts.getQName(i));
             values.append('?');
             if (i < atts.getLength() - 1) {
-                queryString.append(',');
+                query.append(',');
                 values.append(',');
             }
         }
-        queryString.append(") VALUES (").append(values).append(')');
+        query.append(") VALUES (").append(values).append(')');
 
-        Query query = entityManager.createNativeQuery(queryString.toString());
-        setParameters(qName, atts, query);
-
-        query.executeUpdate();
-    }
+        JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
 
-    private void doExportTable(final TransformerHandler handler, final Connection conn, final String tableName,
-            final String whereClause) throws SQLException, SAXException {
-
-        LOG.debug("Export table {}", tableName);
-
-        AttributesImpl attrs = new AttributesImpl();
-
-        PreparedStatement stmt = null;
-        ResultSet rs = null;
-        ResultSet pkeyRS = null;
         try {
-            // ------------------------------------
-            // retrieve primary keys to perform an ordered select
-
-            final DatabaseMetaData meta = conn.getMetaData();
-            pkeyRS = meta.getPrimaryKeys(null, null, tableName);
-
-            final StringBuilder orderBy = new StringBuilder();
-
-            while (pkeyRS.next()) {
-                final String columnName = pkeyRS.getString("COLUMN_NAME");
-                if (columnName != null) {
-                    if (orderBy.length() > 0) {
-                        orderBy.append(",");
-                    }
-
-                    orderBy.append(columnName);
-                }
-            }
-
-            // ------------------------------------
-            StringBuilder query = new StringBuilder();
-            query.append("SELECT * FROM ").append(tableName).append(" a");
-            if (StringUtils.isNotBlank(whereClause)) {
-                query.append(" WHERE ").append(whereClause);
-            }
-            if (orderBy.length() > 0) {
-                query.append(" ORDER BY ").append(orderBy);
-            }
-            stmt = conn.prepareStatement(query.toString());
-
-            rs = stmt.executeQuery();
-            while (rs.next()) {
-                attrs.clear();
-
-                final ResultSetMetaData rsMeta = rs.getMetaData();
-                for (int i = 0; i < rsMeta.getColumnCount(); i++) {
-                    final String columnName = rsMeta.getColumnName(i + 1);
-                    final Integer columnType = rsMeta.getColumnType(i + 1);
-
-                    // Retrieve value taking care of binary values.
-                    String value = getValues(rs, columnName, columnType);
-                    if (value != null && (!COLUMNS_TO_BE_NULLIFIED.containsKey(tableName)
-                            || !COLUMNS_TO_BE_NULLIFIED.get(tableName).contains(columnName))) {
-
-                        attrs.addAttribute("", "", columnName, "CDATA", value);
-                    }
-                }
-
-                handler.startElement("", "", tableName, attrs);
-                handler.endElement("", "", tableName);
-
-                LOG.debug("Add record {}", attrs);
-            }
-        } finally {
-            if (rs != null) {
-                try {
-                    rs.close();
-                } catch (SQLException e) {
-                    LOG.error("While closing result set", e);
-                }
-            }
-            if (pkeyRS != null) {
-                try {
-                    pkeyRS.close();
-                } catch (SQLException e) {
-                    LOG.error("While closing result set", e);
-                }
-            }
-            if (stmt != null) {
-                try {
-                    stmt.close();
-                } catch (SQLException e) {
-                    LOG.error("While closing result set", e);
-                }
-            }
+            jdbcTemplate.update(query.toString(), getParameters(qName, atts));
+        } catch (DataAccessException e) {
+            LOG.error("While trying to perform {}", query, e);
         }
     }
-
-    private List<String> sortByForeignKeys(final Connection conn, final Set<String> tableNames)
-            throws SQLException {
-
-        Set<MultiParentNode<String>> roots = new HashSet<MultiParentNode<String>>();
-
-        final DatabaseMetaData meta = conn.getMetaData();
-
-        final Map<String, MultiParentNode<String>> exploited =
-                new TreeMap<String, MultiParentNode<String>>(String.CASE_INSENSITIVE_ORDER);
-
-        final Set<String> pkTableNames = new HashSet<String>();
-
-        for (String tableName : tableNames) {
-            MultiParentNode<String> node = exploited.get(tableName);
-            if (node == null) {
-                node = new MultiParentNode<String>(tableName);
-                roots.add(node);
-                exploited.put(tableName, node);
-            }
-
-            pkTableNames.clear();
-
-            ResultSet rs = null;
-            try {
-                rs = meta.getImportedKeys(conn.getCatalog(), readSchema(), tableName);
-
-                // this is to avoid repetition
-                while (rs.next()) {
-                    pkTableNames.add(rs.getString("PKTABLE_NAME"));
-                }
-            } finally {
-                if (rs != null) {
-                    try {
-                        rs.close();
-                    } catch (SQLException e) {
-                        LOG.error("While closing tables result set", e);
-                    }
-                }
-            }
-
-            for (String pkTableName : pkTableNames) {
-                if (!tableName.equalsIgnoreCase(pkTableName)) {
-                    MultiParentNode<String> pkNode = exploited.get(pkTableName);
-                    if (pkNode == null) {
-                        pkNode = new MultiParentNode<String>(pkTableName);
-                        roots.add(pkNode);
-                        exploited.put(pkTableName, pkNode);
-                    }
-
-                    pkNode.addChild(node);
-
-                    if (roots.contains(node)) {
-                        roots.remove(node);
-                    }
-                }
-            }
-        }
-
-        final List<String> sortedTableNames = new ArrayList<String>(tableNames.size());
-        MultiParentNodeOp.traverseTree(roots, sortedTableNames);
-
-        // remove from sortedTableNames any table possibly added during lookup 
-        // but matching some item in this.tablePrefixesToBeExcluded
-        sortedTableNames.retainAll(tableNames);
-
-        LOG.debug("Tables after retainAll {}", sortedTableNames);
-
-        Collections.reverse(sortedTableNames);
-
-        return sortedTableNames;
-    }
-
-    private boolean isTableAllowed(final String tableName) {
-        boolean allowed = true;
-        for (String prefix : TABLE_PREFIXES_TO_BE_EXCLUDED) {
-            if (tableName.toUpperCase().startsWith(prefix)) {
-                allowed = false;
-            }
-        }
-        return allowed;
-    }
-
-    public void export(final OutputStream os, final String wfTablePrefix)
-            throws SAXException, TransformerConfigurationException {
-
-        if (StringUtils.isNotBlank(wfTablePrefix)) {
-            TABLE_PREFIXES_TO_BE_EXCLUDED.add(wfTablePrefix);
-        }
-
-        StreamResult streamResult = new StreamResult(os);
-        final SAXTransformerFactory transformerFactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
-
-        TransformerHandler handler = transformerFactory.newTransformerHandler();
-        Transformer serializer = handler.getTransformer();
-        serializer.setOutputProperty(OutputKeys.ENCODING, SyncopeConstants.DEFAULT_ENCODING);
-        serializer.setOutputProperty(OutputKeys.INDENT, "yes");
-        handler.setResult(streamResult);
-        handler.startDocument();
-        handler.startElement("", "", ROOT_ELEMENT, new AttributesImpl());
-
-        Connection conn = null;
-        ResultSet rs = null;
-        try {
-            conn = DataSourceUtils.getConnection(dataSource);
-            final DatabaseMetaData meta = conn.getMetaData();
-
-            final String schema = readSchema();
-
-            rs = meta.getTables(null, schema, null, new String[]{"TABLE"});
-
-            final Set<String> tableNames = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
-
-            while (rs.next()) {
-                String tableName = rs.getString("TABLE_NAME");
-                LOG.debug("Found table {}", tableName);
-                if (isTableAllowed(tableName)) {
-                    tableNames.add(tableName);
-                }
-            }
-
-            LOG.debug("Tables to be exported {}", tableNames);
-
-            // then sort tables based on foreign keys and dump
-            for (String tableName : sortByForeignKeys(conn, tableNames)) {
-                try {
-                    doExportTable(handler, conn, tableName, TABLES_TO_BE_FILTERED.get(tableName.toUpperCase()));
-                } catch (Exception e) {
-                    LOG.error("Failure exporting table {}", tableName, e);
-                }
-            }
-        } catch (SQLException e) {
-            LOG.error("While exporting database content", e);
-        } finally {
-            if (rs != null) {
-                try {
-                    rs.close();
-                } catch (SQLException e) {
-                    LOG.error("While closing tables result set", e);
-                }
-            }
-
-            DataSourceUtils.releaseConnection(conn, dataSource);
-            if (conn != null) {
-                try {
-                    if (!conn.isClosed()) {
-                        conn.close();
-                    }
-                } catch (SQLException e) {
-                    LOG.error("While releasing connection", e);
-                }
-            }
-        }
-
-        handler.endElement("", "", ROOT_ELEMENT);
-        handler.endDocument();
-    }
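
Per the commit log, this plumbing moves to the new ContentExporter and onto Spring's JdbcTemplate, which also retires the fragile teardown above: calling conn.close() after DataSourceUtils.releaseConnection() can close a transaction-bound connection out from under Spring, whereas JdbcTemplate owns both acquisition and release. A hedged sketch of what streaming one table through JdbcTemplate into the SAX handler can look like — not the actual ContentExporter code, and with the method signature simplified:

    import java.sql.ResultSet;
    import java.sql.ResultSetMetaData;
    import java.sql.SQLException;
    import javax.xml.transform.sax.TransformerHandler;
    import org.springframework.jdbc.core.JdbcTemplate;
    import org.springframework.jdbc.core.RowCallbackHandler;
    import org.xml.sax.SAXException;
    import org.xml.sax.helpers.AttributesImpl;

    // Sketch only: serialize each row of the given table as an empty XML element
    // whose attributes are the non-null column values; JdbcTemplate handles the
    // connection lifecycle and translates SQLExceptions into DataAccessExceptions.
    private void doExportTable(final JdbcTemplate jdbcTemplate, final TransformerHandler handler,
            final String tableName) {

        // tableName comes from DatabaseMetaData, not from user input, so plain
        // concatenation is acceptable here
        jdbcTemplate.query("SELECT * FROM " + tableName, new RowCallbackHandler() {

            @Override
            public void processRow(final ResultSet rs) throws SQLException {
                try {
                    AttributesImpl attrs = new AttributesImpl();
                    ResultSetMetaData rsMeta = rs.getMetaData();
                    for (int i = 1; i <= rsMeta.getColumnCount(); i++) {
                        String value = rs.getString(i);
                        if (value != null) {
                            attrs.addAttribute("", "", rsMeta.getColumnName(i), "CDATA", value);
                        }
                    }
                    handler.startElement("", "", tableName, attrs);
                    handler.endElement("", "", tableName);
                } catch (SAXException e) {
                    throw new SQLException("While serializing " + tableName, e);
                }
            }
        });
    }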
-
-    private String getValues(final ResultSet rs, final String columnName, final Integer columnType)
-            throws SQLException {
-
-        String res = null;
-
-        try {
-            switch (columnType) {
-                case Types.BINARY:
-                case Types.VARBINARY:
-                case Types.LONGVARBINARY:
-                    final InputStream is = rs.getBinaryStream(columnName);
-                    if (is != null) {
-                        res = new String(Hex.encode(IOUtils.toByteArray(is)));
-                    }
-                    break;
-
-                case Types.BLOB:
-                    final Blob blob = rs.getBlob(columnName);
-                    if (blob != null) {
-                        res = new String(Hex.encode(IOUtils.toByteArray(blob.getBinaryStream())));
-                    }
-                    break;
-
-                case Types.BIT:
-                case Types.BOOLEAN:
-                    if (rs.getBoolean(columnName)) {
-                        res = "1";
-                    } else {
-                        res = "0";
-                    }
-                    break;
-
-                case Types.DATE:
-                case Types.TIME:
-                case Types.TIMESTAMP:
-                    final Timestamp timestamp = rs.getTimestamp(columnName);
-                    if (timestamp != null) {
-                        res = DataFormat.format(new Date(timestamp.getTime()));
-                    }
-                    break;
-
-                default:
-                    res = rs.getString(columnName);
-            }
-        } catch (IOException e) {
-            LOG.error("Error retrieving hexadecimal string", e);
-        }
-
-        return res;
-    }
 }
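
For reference, the removed getValues() normalized each JDBC column type into an XML-safe string: binary and BLOB columns as hex, BIT/BOOLEAN as "1"/"0", and temporal types through the shared DataFormat helper. A minimal sketch of the binary case, assuming commons-codec and commons-io are on the classpath (the original relied on a Hex helper plus IOUtils in the same way):

    import java.io.IOException;
    import java.io.InputStream;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import org.apache.commons.codec.binary.Hex;
    import org.apache.commons.io.IOUtils;

    // Hex-encode a binary column so it survives round-tripping through XML
    private static String binaryColumnAsHex(final ResultSet rs, final String columnName)
            throws SQLException, IOException {

        InputStream is = rs.getBinaryStream(columnName);
        return is == null ? null : Hex.encodeHexString(IOUtils.toByteArray(is));
    }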

Modified: syncope/trunk/core/src/main/java/org/apache/syncope/core/util/MappingUtil.java
URL: http://svn.apache.org/viewvc/syncope/trunk/core/src/main/java/org/apache/syncope/core/util/MappingUtil.java?rev=1463998&r1=1463997&r2=1463998&view=diff
==============================================================================
--- syncope/trunk/core/src/main/java/org/apache/syncope/core/util/MappingUtil.java (original)
+++ syncope/trunk/core/src/main/java/org/apache/syncope/core/util/MappingUtil.java Wed Apr  3 13:24:54 2013
@@ -364,12 +364,13 @@ public final class MappingUtil {
                         }
                         if (vAttrsToBeRemoved != null && vAttrsToBeUpdated != null) {
                             if (vAttrsToBeUpdated.containsKey(mappingItem.getIntAttrName())) {
-                                virAttr.setValues(vAttrsToBeUpdated.get(mappingItem.getIntAttrName()).
-                                        getValuesToBeAdded());
+                                virAttr.setValues(
+                                        vAttrsToBeUpdated.get(mappingItem.getIntAttrName()).getValuesToBeAdded());
                             } else if (vAttrsToBeRemoved.contains(mappingItem.getIntAttrName())) {
                                 virAttr.getValues().clear();
                             } else {
-                                throw new RuntimeException("Virtual attribute has not to be updated");
+                                throw new IllegalArgumentException("Don't need to update virtual attribute '"
+                                        + mappingItem.getIntAttrName() + "'");
                             }
                         }
                     }
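
Replacing the bare RuntimeException with an IllegalArgumentException that names the offending attribute makes the failure self-describing: reaching this branch means the caller passed an intAttrName that is scheduled neither for update nor for removal — a violated precondition rather than an anonymous runtime condition.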

Modified: syncope/trunk/core/src/test/java/org/apache/syncope/core/init/ConnectorManagerTest.java
URL: http://svn.apache.org/viewvc/syncope/trunk/core/src/test/java/org/apache/syncope/core/init/ConnectorManagerTest.java?rev=1463998&r1=1463997&r2=1463998&view=diff
==============================================================================
--- syncope/trunk/core/src/test/java/org/apache/syncope/core/init/ConnectorManagerTest.java (original)
+++ syncope/trunk/core/src/test/java/org/apache/syncope/core/init/ConnectorManagerTest.java Wed Apr  3 13:24:54 2013
@@ -27,7 +27,6 @@ import org.apache.syncope.core.propagati
 import org.apache.syncope.core.rest.data.ResourceDataBinder;
 import org.apache.syncope.core.util.ApplicationContextProvider;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.test.util.ReflectionTestUtils;
@@ -54,7 +53,7 @@ public class ConnectorManagerTest extend
         connManager.unload();
     }
 
-    @Test@Ignore
+    @Test
     public void load() {
         connManager.load();
 

Modified: syncope/trunk/core/src/test/java/org/apache/syncope/core/rest/AbstractTest.java
URL: http://svn.apache.org/viewvc/syncope/trunk/core/src/test/java/org/apache/syncope/core/rest/AbstractTest.java?rev=1463998&r1=1463997&r2=1463998&view=diff
==============================================================================
--- syncope/trunk/core/src/test/java/org/apache/syncope/core/rest/AbstractTest.java (original)
+++ syncope/trunk/core/src/test/java/org/apache/syncope/core/rest/AbstractTest.java Wed Apr  3 13:24:54 2013
@@ -19,6 +19,7 @@
 package org.apache.syncope.core.rest;
 
 import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -133,7 +134,7 @@ public abstract class AbstractTest {
     private String contentType;
 
     protected UserService userService;
-    
+
     protected UserWorkflowService userWorkflowService;
 
     protected RoleService roleService;
@@ -357,12 +358,12 @@ public abstract class AbstractTest {
                 builder.append(key + ":" + headers.getFirst(key) + ",");
             }
             builder.append(")");
-            throw new RuntimeException("Error on create. Status is : " + response.getStatus() + " with headers "
+            fail("Error on create. Status is : " + response.getStatus() + " with headers "
                     + builder.toString());
         }
     }
 
-    <T extends AbstractSchemaTO> Response createSchema(final AttributableType kind,
+    protected <T extends AbstractSchemaTO> Response createSchema(final AttributableType kind,
             final SchemaType type, final T schemaTO) {
         Response response = schemaService.create(kind, type, schemaTO);
         if (response.getStatus() != HttpStatus.SC_CREATED) {
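
Both test changes above follow the same principle: in JUnit 4, an uncaught RuntimeException is reported as a test *error*, while fail() raises an AssertionError and is reported as a *failure* carrying the intended message, which is what helpers in a test base class should produce; widening createSchema to protected likewise opens the helper to concrete test classes. A hypothetical helper illustrating the pattern (not part of AbstractTest):

    import static org.junit.Assert.fail;

    import javax.ws.rs.core.Response;
    import org.apache.http.HttpStatus;

    // Fails the calling test with a readable message instead of surfacing
    // an anonymous runtime error
    protected void assertCreated(final Response response) {
        if (response.getStatus() != HttpStatus.SC_CREATED) {
            fail("Unexpected status on create: " + response.getStatus());
        }
    }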

Modified: syncope/trunk/core/src/test/java/org/apache/syncope/core/rest/TaskTestITCase.java
URL: http://svn.apache.org/viewvc/syncope/trunk/core/src/test/java/org/apache/syncope/core/rest/TaskTestITCase.java?rev=1463998&r1=1463997&r2=1463998&view=diff
==============================================================================
--- syncope/trunk/core/src/test/java/org/apache/syncope/core/rest/TaskTestITCase.java (original)
+++ syncope/trunk/core/src/test/java/org/apache/syncope/core/rest/TaskTestITCase.java Wed Apr  3 13:24:54 2013
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertFal
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.util.ArrayList;
 import java.util.Collections;
@@ -721,7 +722,7 @@ public class TaskTestITCase extends Abst
             i++;
         } while (preSyncSize == taskTO.getExecutions().size() && i < maxit);
         if (i == maxit) {
-            throw new RuntimeException("Timeout when executing task " + taskId);
+            fail("Timeout when executing task " + taskId);
         }
         return taskTO.getExecutions().get(0);
     }
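
The same fail-on-timeout treatment applies to this polling loop, whose pattern — retry up to maxit times, then fail with context — generalizes well. A hypothetical await helper sketching it, not part of the Syncope test base:

    import static org.junit.Assert.fail;

    import java.util.concurrent.Callable;

    // Hypothetical polling helper: retry the condition up to maxAttempts times,
    // sleeping between attempts, and fail the test with context on timeout
    protected static void await(final Callable<Boolean> condition, final int maxAttempts,
            final long sleepMillis, final String timeoutMessage) throws Exception {

        for (int i = 0; i < maxAttempts; i++) {
            if (Boolean.TRUE.equals(condition.call())) {
                return;
            }
            Thread.sleep(sleepMillis);
        }
        fail(timeoutMessage);
    }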


