asterixdb-commits mailing list archives

From mhub...@apache.org
Subject [1/2] incubator-asterixdb git commit: Improve the Testing Framework
Date Tue, 12 Jan 2016 05:46:00 GMT
Repository: incubator-asterixdb
Updated Branches:
  refs/heads/master 192c7b65a -> c614ce5cb


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c614ce5c/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
b/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
index 5062d06..438db58 100644
--- a/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
+++ b/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetLifecycleManager.java
@@ -146,7 +146,9 @@ public class DatasetLifecycleManager implements IDatasetLifecycleManager, ILifeC
 
         PrimaryIndexOperationTracker opTracker = (PrimaryIndexOperationTracker) datasetOpTrackers.get(dsInfo.datasetID);
         if (iInfo.referenceCount != 0 || (opTracker != null && opTracker.getNumActiveOperations() != 0)) {
-            throw new HyracksDataException("Cannot remove index while it is open.");
+            throw new HyracksDataException("Cannot remove index while it is open. (Dataset reference count = "
+                    + iInfo.referenceCount + ", Operation tracker number of active operations = "
+                    + opTracker.getNumActiveOperations() + ")");
         }
 
         // TODO: use fine-grained counters, one for each index instead of a single counter per dataset.
@@ -301,6 +303,7 @@ public class DatasetLifecycleManager implements IDatasetLifecycleManager, ILifeC
         return openIndexesInfo;
     }
 
+    @Override
     public List<IVirtualBufferCache> getVirtualBufferCaches(int datasetID) {
         synchronized (datasetVirtualBufferCaches) {
             List<IVirtualBufferCache> vbcs = datasetVirtualBufferCaches.get(datasetID);
@@ -607,6 +610,7 @@ public class DatasetLifecycleManager implements IDatasetLifecycleManager, ILifeC
         removeDatasetFromCache(dsInfo.datasetID);
     }
 
+    @Override
     public void closeAllDatasets() throws HyracksDataException {
         for (DatasetInfo dsInfo : datasetInfos.values()) {
             closeDataset(dsInfo);
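A minimal sketch (not part of this patch) of a null-safe way to build the same diagnostic message, assuming the PrimaryIndexOperationTracker type from asterix-common; it only illustrates the state the new message reports when opTracker could be null on this path:

    // Hypothetical helper, shown for illustration only; names are not part of the commit.
    private static String buildRemoveIndexError(int referenceCount, PrimaryIndexOperationTracker opTracker) {
        int activeOps = (opTracker == null) ? 0 : opTracker.getNumActiveOperations();
        return "Cannot remove index while it is open. (Dataset reference count = " + referenceCount
                + ", Operation tracker number of active operations = " + activeOps + ")";
    }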

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c614ce5c/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java b/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
index 6afe692..447e96d 100644
--- a/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
+++ b/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
@@ -22,6 +22,7 @@ import java.io.BufferedInputStream;
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.InputStream;
 import java.io.InputStreamReader;
@@ -40,6 +41,7 @@ import org.apache.asterix.testframework.context.TestCaseContext;
 import org.apache.asterix.testframework.context.TestCaseContext.OutputFormat;
 import org.apache.asterix.testframework.context.TestFileContext;
 import org.apache.asterix.testframework.xml.TestCase.CompilationUnit;
+import org.apache.asterix.testframework.xml.TestCase.CompilationUnit.ExpectedError;
 import org.apache.asterix.testframework.xml.TestGroup;
 import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
 import org.apache.commons.httpclient.HttpClient;
@@ -197,15 +199,8 @@ public class TestExecutor {
 
     // For tests where you simply want the byte-for-byte output.
     private static void writeOutputToFile(File actualFile, InputStream resultStream) throws Exception {
-        byte[] buffer = new byte[10240];
-        int len;
-        java.io.FileOutputStream out = new java.io.FileOutputStream(actualFile);
-        try {
-            while ((len = resultStream.read(buffer)) != -1) {
-                out.write(buffer, 0, len);
-            }
-        } finally {
-            out.close();
+        try (FileOutputStream out = new FileOutputStream(actualFile)) {
+            IOUtils.copy(resultStream, out);
         }
     }
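A minimal, standalone sketch of the try-with-resources plus IOUtils.copy pattern adopted above, assuming the Apache Commons IO IOUtils class is on the test classpath:

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.InputStream;
    import org.apache.commons.io.IOUtils;

    class StreamDump {
        // Copies a result stream to a file; the output stream is closed automatically,
        // even if the copy fails, replacing the manual 10 KB buffer loop.
        static void copyStreamToFile(InputStream resultStream, File actualFile) throws Exception {
            try (FileOutputStream out = new FileOutputStream(actualFile)) {
                IOUtils.copy(resultStream, out);
            }
        }
    }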
 
@@ -224,16 +219,13 @@ public class TestExecutor {
             // In future this may be changed depending on the requested
             // output format sent to the servlet.
             String errorBody = method.getResponseBodyAsString();
-            try {
-                JSONObject result = new JSONObject(errorBody);
-                String[] errors = { result.getJSONArray("error-code").getString(0), result.getString("summary"),
-                        result.getString("stacktrace") };
-                GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, errors[2]);
-                throw new Exception("HTTP operation failed: " + errors[0] + "\nSTATUS LINE: " + method.getStatusLine()
-                        + "\nSUMMARY: " + errors[1] + "\nSTACKTRACE: " + errors[2]);
-            } catch (Exception e) {
-                throw new Exception(errorBody);
-            }
+            JSONObject result = new JSONObject(errorBody);
+            String[] errors = { result.getJSONArray("error-code").getString(0), result.getString("summary"),
+                    result.getString("stacktrace") };
+            GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, errors[2]);
+            String exceptionMsg = "HTTP operation failed: " + errors[0] + "\nSTATUS LINE: " + method.getStatusLine()
+                    + "\nSUMMARY: " + errors[1] + "\nSTACKTRACE: " + errors[2];
+            throw new Exception(exceptionMsg);
         }
         return statusCode;
     }
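Behaviorally, dropping the inner try/catch means a non-JSON error body now surfaces as a JSONException from the parser instead of being rethrown as a plain Exception wrapping the raw body. A small illustration with hypothetical payloads (field names match the code above; the values are made up):

    // Well-formed payload: parsed, logged, and rethrown as a descriptive Exception.
    String ok = "{\"error-code\":[\"ASX0001\"],\"summary\":\"s\",\"stacktrace\":\"t\"}";
    String code = new JSONObject(ok).getJSONArray("error-code").getString(0);   // "ASX0001"

    // Malformed payload: the old code fell back to throw new Exception(errorBody);
    // the new code lets the parser's JSONException propagate to the caller.
    new JSONObject("<html>502 Bad Gateway</html>");                             // throws JSONException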
@@ -418,6 +410,8 @@ public class TestExecutor {
         File qbcFile = null;
         File qarFile = null;
         int queryCount = 0;
+        int numOfErrors = 0;
+        int numOfFiles = 0;
 
         List<CompilationUnit> cUnits = testCaseCtx.getTestCase().getCompilationUnit();
         for (CompilationUnit cUnit : cUnits) {
@@ -426,6 +420,7 @@ public class TestExecutor {
             testFileCtxs = testCaseCtx.getTestFiles(cUnit);
             expectedResultFileCtxs = testCaseCtx.getExpectedResultFiles(cUnit);
             for (TestFileContext ctx : testFileCtxs) {
+                numOfFiles++;
                 testFile = ctx.getFile();
                 statement = readTestFile(testFile);
                 boolean failed = false;
@@ -566,8 +561,7 @@ public class TestExecutor {
                                 e.printStackTrace();
                             }
                             if (!failed) {
-                                throw new Exception(
-                                        "Test \"" + testFile + "\" FAILED!\n  An exception" + "is expected.");
+                                throw new Exception("Test \"" + testFile + "\" FAILED!\n  An exception is expected.");
                             }
                             System.err.println("...but that was expected.");
                             break;
@@ -576,22 +570,36 @@ public class TestExecutor {
                     }
 
                 } catch (Exception e) {
-
                     System.err.println("testFile " + testFile.toString() + " raised an exception:");
-
-                    e.printStackTrace();
-                    if (cUnit.getExpectedError().isEmpty()) {
+                    boolean unExpectedFailure = false;
+                    numOfErrors++;
+                    if (cUnit.getExpectedError().size() < numOfErrors) {
+                        unExpectedFailure = true;
+                    } else {
+                        // Get the expected exception
+                        ExpectedError expectedError = cUnit.getExpectedError().get(numOfErrors - 1);
+                        if (e.toString().contains(expectedError.getValue())) {
+                            System.err.println("...but that was expected.");
+                        } else {
+                            unExpectedFailure = true;
+                        }
+                    }
+                    if (unExpectedFailure) {
+                        e.printStackTrace();
                         System.err.println("...Unexpected!");
                         if (failedGroup != null) {
                             failedGroup.getTestCase().add(testCaseCtx.getTestCase());
                         }
                         throw new Exception("Test \"" + testFile + "\" FAILED!", e);
-                    } else {
-                    LOGGER.info("[TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/" + cUnit.getName()
-                                + " failed as expected: " + e.getMessage());
-                        System.err.println("...but that was expected.");
                     }
-
+                } finally {
+                    if (numOfFiles == testFileCtxs.size() && numOfErrors < cUnit.getExpectedError().size()) {
+                        System.err.println("...Unexpected!");
+                        Exception e = new Exception(
+                                "Test \"" + cUnit.getName() + "\" FAILED!\nExpected error was not thrown...");
+                        e.printStackTrace();
+                        throw e;
+                    }
                 }
             }
         }
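A compact sketch of the matching rule introduced above: expected errors are consumed in order, one per failing statement, and compared by substring against the exception text. The method name and signature below are illustrative, not the framework API:

    // Illustrative helper: true when the i-th failure matches the i-th <expected-error> entry.
    static boolean matchesExpected(java.util.List<String> expectedErrors, int numOfErrors, Exception e) {
        if (expectedErrors.size() < numOfErrors) {
            return false;                           // more failures than declared <expected-error> entries
        }
        String expected = expectedErrors.get(numOfErrors - 1);
        return e.toString().contains(expected);     // e.g. "... Could not find dataset Fragile in dataverse recovery"
    }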

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c614ce5c/asterix-installer/src/test/resources/transactionts/testsuite.xml
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/transactionts/testsuite.xml b/asterix-installer/src/test/resources/transactionts/testsuite.xml
index cf95132..ce2d0bd 100644
--- a/asterix-installer/src/test/resources/transactionts/testsuite.xml
+++ b/asterix-installer/src/test/resources/transactionts/testsuite.xml
@@ -80,10 +80,10 @@
     <test-case FilePath="recover_after_abort">
       <compilation-unit name="temp_primary_plus_keyword_secondary_index">
         <output-dir compare="Text">primary_plus_keyword_secondary_index</output-dir>
-        <expected-error>org.apache.hyracks.api.exceptions.HyracksDataException</expected-error>
+        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Could not find dataset Fragile in dataverse recovery</expected-error>
       </compilation-unit>
     </test-case>
-      
+
     <test-case FilePath="recover_after_abort">
       <compilation-unit name="temp_primary_plus_ngram_index">
         <output-dir compare="Text">primary_plus_ngram_index</output-dir>
@@ -92,9 +92,9 @@
     </test-case>
 
     <test-case FilePath="recover_after_abort">
-      <compilation-unit name="primary_plus_multiple_secondary_indices">
+      <compilation-unit name="primary_plus_multiple_secondary_indices"><!-- The only exception here is during the kill command which is in a different JVM, hence not caught -->
         <output-dir compare="Text">primary_plus_multiple_secondary_indices</output-dir>
-        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+        <!-- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error> -->
       </compilation-unit>
     </test-case>
  </test-group>
@@ -152,14 +152,15 @@
     <test-case FilePath="recovery_ddl">
       <compilation-unit name="temp_dataset_recovery">
         <output-dir compare="Text">dataset_recovery</output-dir>
-        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+        <!-- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error> -->
       </compilation-unit>
     </test-case>
 
     <test-case FilePath="recovery_ddl">
       <compilation-unit name="temp_delete_after_recovery">
         <output-dir compare="Text">delete_after_recovery</output-dir>
-        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Cannot find dataset Fragile_raw in dataverse recovery</expected-error>
+        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Could not find dataset Fragile_raw in dataverse recovery</expected-error>
       </compilation-unit>
     </test-case>
 
@@ -167,6 +168,7 @@
       <compilation-unit name="temp_insert_after_recovery">
         <output-dir compare="Text">insert_after_recovery</output-dir>
         <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Could not find dataset Fragile in dataverse recovery</expected-error>
       </compilation-unit>
     </test-case>
 
@@ -174,13 +176,14 @@
       <compilation-unit name="temp_load_after_recovery">
         <output-dir compare="Text">load_after_recovery</output-dir>
         <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Could not find dataset Fragile_raw in dataverse recovery</expected-error>
       </compilation-unit>
     </test-case>
 
     <test-case FilePath="recovery_ddl">
       <compilation-unit name="temp_secondary_index_recovery">
         <output-dir compare="Text">secondary_index_recovery</output-dir>
-        <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error>
+        <!-- <expected-error>org.apache.hyracks.algebricks.common.exceptions.AlgebricksException</expected-error> -->
       </compilation-unit>
     </test-case>
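Both the class-only and the fuller expected-error strings above work with the TestExecutor change, because the comparison is a substring check against the exception text; a tiny illustration with a hypothetical exception:

    Exception e = new Exception(
            "org.apache.hyracks.algebricks.common.exceptions.AlgebricksException: Could not find dataset Fragile_raw in dataverse recovery");
    boolean classOnly = e.toString().contains("org.apache.hyracks.algebricks.common.exceptions.AlgebricksException");           // true
    boolean fullMsg = e.toString().contains("AlgebricksException: Could not find dataset Fragile_raw in dataverse recovery");   // true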
  

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/c614ce5c/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
b/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
index 5ef58cd..7c32bdf 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SplitsAndConstraintsUtil.java
@@ -85,9 +85,10 @@ public class SplitsAndConstraintsUtil {
 
                 for (int k = 0; k < numPartitions; k++) {
                     //format: 'storage dir name'/partition_#/dataverse/dataset_idx_index
-                    File f = new File(prepareStoragePartitionPath(storageDirName, nodePartitions[k].getPartitionId())
-                            + (temp ? (File.separator + TEMP_DATASETS_STORAGE_FOLDER) : "") + File.separator
-                            + relPathFile);
+                    //temp format: 'storage dir name'/temp/partition_#/dataverse/dataset_idx_index
+                    File f = new File(prepareStoragePartitionPath(
+                            storageDirName + (temp ? (File.separator + TEMP_DATASETS_STORAGE_FOLDER) : ""),
+                            nodePartitions[k].getPartitionId()) + File.separator + relPathFile);
                     splits.add(getFileSplitForClusterPartition(nodePartitions[k], f));
                 }
             }
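For reference, a sketch of the on-disk layouts before and after the reordering above, with hypothetical values; prepareStoragePartitionPath is assumed to append "partition_<id>" to the directory it is given:

    // Hypothetical values, for illustration only.
    String storageDirName = "storage";
    String relPathFile = "recovery/Fragile_idx_Fragile";

    // Before: temp folder nested under the partition directory.
    //   storage/partition_0/temp/recovery/Fragile_idx_Fragile
    String oldPath = storageDirName + "/partition_0" + "/temp" + "/" + relPathFile;

    // After: temp folder placed above the partition directory, matching the
    //   'storage dir name'/temp/partition_#/dataverse/dataset_idx_index comment.
    String newPath = storageDirName + "/temp" + "/partition_0" + "/" + relPathFile;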

