accumulo-commits mailing list archives

From: bil...@apache.org
Subject: svn commit: r1332674 - in /accumulo/trunk: bin/ examples/wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/ examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/ examples/wikisearch/query/...
Date: Tue, 01 May 2012 14:35:39 GMT
Author: billie
Date: Tue May  1 14:35:39 2012
New Revision: 1332674

URL: http://svn.apache.org/viewvc?rev=1332674&view=rev
Log:
ACCUMULO-564 changes for 0.23 compile compatibility

Removed:
    accumulo/trunk/examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/StandaloneStatusReporter.java
    accumulo/trunk/examples/wikisearch/ingest/src/test/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaMapperTest.java
Modified:
    accumulo/trunk/bin/config.sh
    accumulo/trunk/examples/wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaIngester.java
    accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/StandaloneStatusReporter.java
    accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java
    accumulo/trunk/server/src/main/java/org/apache/accumulo/server/Accumulo.java
    accumulo/trunk/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java

Modified: accumulo/trunk/bin/config.sh
URL: http://svn.apache.org/viewvc/accumulo/trunk/bin/config.sh?rev=1332674&r1=1332673&r2=1332674&view=diff
==============================================================================
--- accumulo/trunk/bin/config.sh (original)
+++ accumulo/trunk/bin/config.sh Tue May  1 14:35:39 2012
@@ -67,11 +67,6 @@ then
 fi
 export HADOOP_HOME
 
-if [ "`$HADOOP_HOME/bin/hadoop version 2>/dev/null | head -1 | cut -f 2 -d .`" != 20 ];
then
-    echo "Accumulo $ACCUMULO_VERSION requires Hadoop version 0.20.x"
-    exit 1
-fi
-
 if [ ! -f "$ACCUMULO_HOME/conf/masters" -o ! -f "$ACCUMULO_HOME/conf/slaves" ]
 then
     if [ ! -f "$ACCUMULO_HOME/conf/masters" -a ! -f "$ACCUMULO_HOME/conf/slaves" ]

Modified: accumulo/trunk/examples/wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaIngester.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/examples/wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaIngester.java?rev=1332674&r1=1332673&r2=1332674&view=diff
==============================================================================
--- accumulo/trunk/examples/wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaIngester.java (original)
+++ accumulo/trunk/examples/wikisearch/ingest/src/main/java/org/apache/accumulo/examples/wikisearch/ingest/WikipediaIngester.java Tue May  1 14:35:39 2012
@@ -66,8 +66,8 @@ public class WikipediaIngester extends C
     System.exit(res);
   }
   
-  private void createTables(TableOperations tops, String tableName) throws AccumuloException, AccumuloSecurityException, TableNotFoundException,
-      TableExistsException {
+  public static void createTables(TableOperations tops, String tableName, boolean configureLocalityGroups) throws AccumuloException, AccumuloSecurityException,
+      TableNotFoundException, TableExistsException {
     // Create the shard table
     String indexTableName = tableName + "Index";
     String reverseIndexTableName = tableName + "ReverseIndex";
@@ -94,7 +94,9 @@ public class WikipediaIngester extends C
       }
       
       // Set the locality group for the full content column family
-      tops.setLocalityGroups(tableName, Collections.singletonMap("WikipediaDocuments", Collections.singleton(new Text(WikipediaMapper.DOCUMENT_COLUMN_FAMILY))));
+      if (configureLocalityGroups)
+        tops.setLocalityGroups(tableName,
+            Collections.singletonMap("WikipediaDocuments", Collections.singleton(new Text(WikipediaMapper.DOCUMENT_COLUMN_FAMILY))));
       
     }
     
@@ -143,7 +145,7 @@ public class WikipediaIngester extends C
     
     TableOperations tops = connector.tableOperations();
     
-    createTables(tops, tablename);
+    createTables(tops, tablename, true);
     
     configureJob(job);
     

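The new configureLocalityGroups flag lets the two call sites of this now-static helper diverge. A sketch of both calls as they stand after this commit (the second appears in TestQueryLogic below); the motivation for skipping setLocalityGroups in tests is an assumption here, presumably because MockInstance does not support it:

    // Production ingest path (WikipediaIngester.run): configure locality groups.
    createTables(tops, tablename, true);

    // Test path (TestQueryLogic.setup, against MockInstance): skip them.
    // Assumption: MockInstance cannot apply setLocalityGroups.
    WikipediaIngester.createTables(c.tableOperations(), TABLE_NAME, false);
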
Modified: accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/StandaloneStatusReporter.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/StandaloneStatusReporter.java?rev=1332674&r1=1332673&r2=1332674&view=diff
==============================================================================
--- accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/StandaloneStatusReporter.java (original)
+++ accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/StandaloneStatusReporter.java Tue May  1 14:35:39 2012
@@ -67,4 +67,8 @@ public class StandaloneStatusReporter ex
   public void incrementRecordsProcessed() {
     recordsProcessed++;
   }
+  
+  public float getProgress() {
+    return 0;
+  }
 }
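
The added getProgress() stub tracks an API change: in Hadoop 0.23, StatusReporter declares getProgress() as an abstract method (it does not exist in 0.20), so every concrete reporter must supply one. A minimal sketch of the shape, using a hypothetical stand-in for the Hadoop base class:

    // Hypothetical stand-in for the relevant part of Hadoop 0.23's
    // StatusReporter contract (assumption: getProgress() is abstract there).
    abstract class ReporterBase {
      abstract float getProgress();
    }

    // A standalone test reporter satisfies it with a no-op override:
    class StandaloneReporter extends ReporterBase {
      float getProgress() {
        return 0; // the test reporter tracks counters, not progress
      }
    }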

Modified: accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java?rev=1332674&r1=1332673&r2=1332674&view=diff
==============================================================================
--- accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java (original)
+++ accumulo/trunk/examples/wikisearch/query/src/test/java/org/apache/accumulo/examples/wikisearch/logic/TestQueryLogic.java Tue May  1 14:35:39 2012
@@ -16,6 +16,8 @@
  */
 package org.apache.accumulo.examples.wikisearch.logic;
 
+import static org.junit.Assert.assertEquals;
+
 import java.io.File;
 import java.io.IOException;
 import java.net.URL;
@@ -38,12 +40,12 @@ import org.apache.accumulo.core.data.Val
 import org.apache.accumulo.core.security.Authorizations;
 import org.apache.accumulo.core.util.ContextFactory;
 import org.apache.accumulo.examples.wikisearch.ingest.WikipediaConfiguration;
+import org.apache.accumulo.examples.wikisearch.ingest.WikipediaIngester;
 import org.apache.accumulo.examples.wikisearch.ingest.WikipediaInputFormat.WikipediaInputSplit;
 import org.apache.accumulo.examples.wikisearch.ingest.WikipediaMapper;
 import org.apache.accumulo.examples.wikisearch.parser.RangeCalculator;
 import org.apache.accumulo.examples.wikisearch.reader.AggregatingRecordReader;
 import org.apache.accumulo.examples.wikisearch.sample.Document;
-import org.apache.accumulo.examples.wikisearch.sample.Field;
 import org.apache.accumulo.examples.wikisearch.sample.Results;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -117,11 +119,8 @@ public class TestQueryLogic {
     
     MockInstance i = new MockInstance();
     c = i.getConnector("root", "pass");
+    WikipediaIngester.createTables(c.tableOperations(), TABLE_NAME, false);
     for (String table : TABLE_NAMES) {
-      try {
-        c.tableOperations().delete(table);
-      } catch (Exception ex) {}
-      c.tableOperations().create(table);
       writerMap.put(new Text(table), c.createBatchWriter(table, 1000L, 1000L, 1));
     }
     
@@ -162,7 +161,7 @@ public class TestQueryLogic {
   }
   
   void debugQuery(String tableName) throws Exception {
-    Scanner s = c.createScanner(tableName, new Authorizations());
+    Scanner s = c.createScanner(tableName, new Authorizations("all"));
     Range r = new Range();
     s.setRange(r);
     for (Entry<Key,Value> entry : s)
@@ -170,17 +169,23 @@ public class TestQueryLogic {
   }
   
   @Test
-  public void testTitle() {
+  public void testTitle() throws Exception {
     Logger.getLogger(AbstractQueryLogic.class).setLevel(Level.OFF);
     Logger.getLogger(RangeCalculator.class).setLevel(Level.OFF);
     List<String> auths = new ArrayList<String>();
     auths.add("enwiki");
-    Results results = table.runQuery(c, auths, "TITLE == 'afghanistanhistory'", null, null, null);
-    for (Document doc : results.getResults()) {
-      System.out.println("id: " + doc.getId());
-      for (Field field : doc.getFields())
-        System.out.println(field.getFieldName() + " -> " + field.getFieldValue());
-    }
+    
+    Results results = table.runQuery(c, auths, "TITLE == 'asphalt' or TITLE == 'abacus' or TITLE == 'acid' or TITLE == 'acronym'", null, null, null);
+    List<Document> docs = results.getResults();
+    assertEquals(4, docs.size());
+    
+    /*
+     * debugQuery(METADATA_TABLE_NAME); debugQuery(TABLE_NAME); debugQuery(INDEX_TABLE_NAME); debugQuery(RINDEX_TABLE_NAME);
+     * 
+     * results = table.runQuery(c, auths, "TEXT == 'abacus'", null, null, null); docs = results.getResults(); assertEquals(4, docs.size()); for (Document doc :
+     * docs) { System.out.println("id: " + doc.getId()); for (Field field : doc.getFields()) System.out.println(field.getFieldName() + " -> " +
+     * field.getFieldValue()); }
+     */
   }
   
 }

Modified: accumulo/trunk/server/src/main/java/org/apache/accumulo/server/Accumulo.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/server/src/main/java/org/apache/accumulo/server/Accumulo.java?rev=1332674&r1=1332673&r2=1332674&view=diff
==============================================================================
--- accumulo/trunk/server/src/main/java/org/apache/accumulo/server/Accumulo.java (original)
+++ accumulo/trunk/server/src/main/java/org/apache/accumulo/server/Accumulo.java Tue May  1 14:35:39 2012
@@ -35,7 +35,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction;
+import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.log4j.Logger;
 import org.apache.log4j.helpers.LogLog;
 import org.apache.log4j.xml.DOMConfigurator;
@@ -55,7 +55,7 @@ public class Accumulo {
       throw new RuntimeException("Unable to set accumulo version: an error occurred.", e);
     }
   }
-
+  
   public static synchronized int getAccumuloPersistentVersion(FileSystem fs) {
     int dataVersion;
     try {
@@ -163,7 +163,7 @@ public class Accumulo {
         if (!(fs instanceof DistributedFileSystem))
           break;
         DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(CachedConfiguration.getInstance());
-        if (!dfs.setSafeMode(SafeModeAction.SAFEMODE_GET))
+        if (!dfs.setSafeMode(FSConstants.SafeModeAction.SAFEMODE_GET))
           break;
         log.warn("Waiting for the NameNode to leave safemode");
       } catch (IOException ex) {
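
This hunk and the matching ones in DefaultServlet.java below replace single-type imports of nested types (FSConstants.SafeModeAction, FSConstants.DatanodeReportType) with an import of the outer FSConstants plus qualified references. A plausible reading, assuming the 0.23 layout in which these enums moved to HdfsConstants and FSConstants survives only as a deprecated subtype: a Java import must use a nested type's canonical name, so the old imports break on 0.23, while qualified references still resolve through inheritance on both versions. A self-contained illustration with hypothetical classes:

    // Base declares the nested enum; Sub merely inherits it (analogous to
    // HdfsConstants declaring SafeModeAction and FSConstants extending it).
    class Base {
      enum Mode { GET, ENTER, LEAVE }
    }

    class Sub extends Base {}

    public class ImportDemo {
      public static void main(String[] args) {
        // A qualified reference through the subtype resolves via inheritance:
        Base.Mode m = Sub.Mode.GET;
        System.out.println(m);
        // But a single-type import written as "import pkg.Sub.Mode;" would
        // not compile: imports require the canonical name, pkg.Base.Mode.
      }
    }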

Modified: accumulo/trunk/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java
URL: http://svn.apache.org/viewvc/accumulo/trunk/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java?rev=1332674&r1=1332673&r2=1332674&view=diff
==============================================================================
--- accumulo/trunk/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java (original)
+++ accumulo/trunk/server/src/main/java/org/apache/accumulo/server/monitor/servlets/DefaultServlet.java Tue May  1 14:35:39 2012
@@ -55,7 +55,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
-import org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType;
+import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobClient;
@@ -194,12 +194,11 @@ public class DefaultServlet extends Basi
     if (points)
       opts = "points: { show: true, radius: 1 }";
     
-
     sb.append("    $.plot($(\"#" + id + "\"),");
     String sep = "";
     
     String colors[] = new String[] {"red", "blue", "green", "black"};
-
+    
     sb.append("[");
     for (int i = 0; i < series.length; i++) {
       sb.append(sep);
@@ -299,7 +298,7 @@ public class DefaultServlet extends Basi
         ContentSummary rootSummary = fs.getContentSummary(new Path("/"));
         consumed = String.format("%.2f%%", acu.getSpaceConsumed() * 100. / rootSummary.getSpaceConsumed());
         diskUsed = bytes(acu.getSpaceConsumed());
-
+        
         boolean highlight = false;
         tableRow(sb, (highlight = !highlight), "Disk&nbsp;Used", diskUsed);
         if (fs.getUsed() != 0)
@@ -334,8 +333,8 @@ public class DefaultServlet extends Basi
       tableRow(sb, (highlight = !highlight), "Unreplicated&nbsp;Capacity", bytes(fs.getRawCapacity()));
       tableRow(sb, (highlight = !highlight), "%&nbsp;Used", NumberType.commas(fs.getRawUsed() * 100. / fs.getRawCapacity(), 0, 90, 0, 100) + "%");
       tableRow(sb, (highlight = !highlight), "Corrupt&nbsp;Blocks", NumberType.commas(fs.getCorruptBlocksCount(), 0, 0));
-      DatanodeInfo[] liveNodes = fs.getClient().datanodeReport(DatanodeReportType.LIVE);
-      DatanodeInfo[] deadNodes = fs.getClient().datanodeReport(DatanodeReportType.DEAD);
+      DatanodeInfo[] liveNodes = fs.getClient().datanodeReport(FSConstants.DatanodeReportType.LIVE);
+      DatanodeInfo[] deadNodes = fs.getClient().datanodeReport(FSConstants.DatanodeReportType.DEAD);
       tableRow(sb, (highlight = !highlight), "<a href='" + liveUrl + "'>Live&nbsp;Data&nbsp;Nodes</a>", NumberType.commas(liveNodes.length));
       tableRow(sb, (highlight = !highlight), "<a href='" + deadUrl + "'>Dead&nbsp;Data&nbsp;Nodes</a>", NumberType.commas(deadNodes.length));
       long count = 0;
@@ -356,7 +355,7 @@ public class DefaultServlet extends Basi
     sb.append("<table>\n");
     try {
       InetSocketAddress address = JobTracker.getAddress(conf);
-
+      
       @SuppressWarnings("deprecation")
       // No alternative api in hadoop 20
       JobClient jc = new JobClient(new org.apache.hadoop.mapred.JobConf(conf));


