incubator-blur-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From amccu...@apache.org
Subject git commit: Removing dead code and cleaning up deps.
Date Tue, 29 Apr 2014 14:55:50 GMT
Repository: incubator-blur
Updated Branches:
  refs/heads/apache-blur-0.2 c716c8479 -> 2f17851dc


Removing dead code and cleaning up deps.


Project: http://git-wip-us.apache.org/repos/asf/incubator-blur/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-blur/commit/2f17851d
Tree: http://git-wip-us.apache.org/repos/asf/incubator-blur/tree/2f17851d
Diff: http://git-wip-us.apache.org/repos/asf/incubator-blur/diff/2f17851d

Branch: refs/heads/apache-blur-0.2
Commit: 2f17851dce28aca67bf78cdc073df04dee094bac
Parents: c716c84
Author: Aaron McCurry <amccurry@gmail.com>
Authored: Tue Apr 29 10:55:35 2014 -0400
Committer: Aaron McCurry <amccurry@gmail.com>
Committed: Tue Apr 29 10:55:35 2014 -0400

----------------------------------------------------------------------
 blur-thrift/pom.xml                             |  11 +
 .../apache/blur/thrift/SortFieldComparator.java |  16 +-
 blur-util/pom.xml                               |  38 +--
 .../org/apache/blur/metrics/HDFSReporter.java   | 329 -------------------
 pom.xml                                         |   2 +
 5 files changed, 32 insertions(+), 364 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/2f17851d/blur-thrift/pom.xml
----------------------------------------------------------------------
diff --git a/blur-thrift/pom.xml b/blur-thrift/pom.xml
index bb336b6..ee0c3bc 100644
--- a/blur-thrift/pom.xml
+++ b/blur-thrift/pom.xml
@@ -68,6 +68,17 @@ under the License.
 			<artifactId>httpclient</artifactId>
 			<version>${httpclient.version}</version>
 		</dependency>
+        <dependency>
+            <groupId>commons-lang</groupId>
+            <artifactId>commons-lang</artifactId>
+            <version>${commons-lang.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>javax.servlet</groupId>
+            <artifactId>servlet-api</artifactId>
+            <version>${servlet-api.version}</version>
+            <scope>provided</scope>
+        </dependency>
 	</dependencies>
 
 	<repositories>

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/2f17851d/blur-thrift/src/main/java/org/apache/blur/thrift/SortFieldComparator.java
----------------------------------------------------------------------
diff --git a/blur-thrift/src/main/java/org/apache/blur/thrift/SortFieldComparator.java b/blur-thrift/src/main/java/org/apache/blur/thrift/SortFieldComparator.java
index b55c7e3..5c93f5a 100644
--- a/blur-thrift/src/main/java/org/apache/blur/thrift/SortFieldComparator.java
+++ b/blur-thrift/src/main/java/org/apache/blur/thrift/SortFieldComparator.java
@@ -20,7 +20,6 @@ import java.util.Comparator;
 
 import org.apache.blur.thrift.generated.SortFieldResult;
 import org.apache.blur.thrift.generated.SortFieldResult._Fields;
-import org.apache.hadoop.io.WritableComparator;
 
 public class SortFieldComparator implements Comparator<SortFieldResult> {
 
@@ -53,7 +52,20 @@ public class SortFieldComparator implements Comparator<SortFieldResult> {
   }
 
   public int compare(byte[] b1, byte[] b2) {
-    return WritableComparator.compareBytes(b1, 0, b1.length, b2, 0, b2.length);
+    return compareBytes(b1, 0, b1.length, b2, 0, b2.length);
+  }
+
+  public static int compareBytes(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+    int end1 = s1 + l1;
+    int end2 = s2 + l2;
+    for (int i = s1, j = s2; i < end1 && j < end2; i++, j++) {
+      int a = (b1[i] & 0xff);
+      int b = (b2[j] & 0xff);
+      if (a != b) {
+        return a - b;
+      }
+    }
+    return l1 - l2;
   }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/2f17851d/blur-util/pom.xml
----------------------------------------------------------------------
diff --git a/blur-util/pom.xml b/blur-util/pom.xml
index 062e93b..8390572 100644
--- a/blur-util/pom.xml
+++ b/blur-util/pom.xml
@@ -33,6 +33,11 @@ under the License.
 	<description>The Blur util module contains mostly utility classes used throughout the Blur project.</description>
 
 	<dependencies>
+        <dependency>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+            <version>${commons-logging.version}</version>
+        </dependency>
 		<dependency>
 			<groupId>org.apache.zookeeper</groupId>
 			<artifactId>zookeeper</artifactId>
@@ -200,37 +205,4 @@ under the License.
 			</plugins>
 		</pluginManagement>
 	</build>
-	
-	<profiles>
-		<profile>
-			<id>hadoop-1x</id>
-			<activation>
-				<property>
-					<name>hadoop1</name>
-				</property>
-			</activation>
-			<dependencies>
-				<dependency>
-					<groupId>org.apache.hadoop</groupId>
-					<artifactId>hadoop-core</artifactId>
-					<version>${hadoop.version}</version>
-				</dependency>
-			</dependencies>
-		</profile>
-		<profile>
-			<id>hadoop-2.2</id>
-			<activation>
-				<property>
-					<name>hadoop2</name>
-				</property>
-			</activation>
-			<dependencies>
-				<dependency>
-					<groupId>org.apache.hadoop</groupId>
-					<artifactId>hadoop-client</artifactId>
-					<version>${hadoop.version}</version>
-				</dependency>
-			</dependencies>
-		</profile>
-	</profiles>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/2f17851d/blur-util/src/main/java/org/apache/blur/metrics/HDFSReporter.java
----------------------------------------------------------------------
diff --git a/blur-util/src/main/java/org/apache/blur/metrics/HDFSReporter.java b/blur-util/src/main/java/org/apache/blur/metrics/HDFSReporter.java
deleted file mode 100644
index 9e413f6..0000000
--- a/blur-util/src/main/java/org/apache/blur/metrics/HDFSReporter.java
+++ /dev/null
@@ -1,329 +0,0 @@
-package org.apache.blur.metrics;
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Map.Entry;
-import java.util.SortedMap;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.json.JSONException;
-import org.json.JSONObject;
-
-import com.yammer.metrics.Metrics;
-import com.yammer.metrics.core.Clock;
-import com.yammer.metrics.core.Counter;
-import com.yammer.metrics.core.Gauge;
-import com.yammer.metrics.core.Histogram;
-import com.yammer.metrics.core.Metered;
-import com.yammer.metrics.core.Metric;
-import com.yammer.metrics.core.MetricName;
-import com.yammer.metrics.core.MetricProcessor;
-import com.yammer.metrics.core.MetricsRegistry;
-import com.yammer.metrics.core.Timer;
-import com.yammer.metrics.reporting.AbstractPollingReporter;
-import com.yammer.metrics.stats.Snapshot;
-
-public class HDFSReporter extends AbstractPollingReporter implements MetricProcessor<HDFSReporter.Context> {
-
-  private static Log LOG = LogFactory.getLog(HDFSReporter.class);
-
-  static class Context {
-
-    private final Path path;
-    private final SimpleDateFormat formatter;
-    private final String name;
-    private final FileSystem fileSystem;
-    private String currentOutputFilePattern;
-    private long now;
-    private PrintWriter printStream;
-    private FSDataOutputStream outputStream;
-    private Path currentOutputPath;
-    private long maxTimeToKeep;
-
-    public Context(Path path, Configuration configuration, String filePattern, String name) throws IOException {
-      this.path = path;
-      this.fileSystem = path.getFileSystem(configuration);
-      if (fileSystem.exists(path)) {
-        if (!fileSystem.getFileStatus(path).isDir()) {
-          throw new IOException("Path [" + path + "] is not a directory.");
-        }
-      } else {
-        fileSystem.mkdirs(path);
-      }
-      this.name = name;
-      this.formatter = new SimpleDateFormat(filePattern);
-      this.maxTimeToKeep = TimeUnit.MINUTES.toMillis(10);
-    }
-
-    public void open(long now) throws IOException {
-      this.now = now;
-      String outputFilePattern = formatter.format(new Date(now));
-      if (!outputFilePattern.equals(currentOutputFilePattern)) {
-        // roll file
-        rollFile(outputFilePattern);
-        cleanupOldMetrics();
-      }
-    }
-
-    private void cleanupOldMetrics() throws IOException {
-      FileStatus[] listStatus = fileSystem.listStatus(path);
-      for (FileStatus fileStatus : listStatus) {
-        Path filePath = fileStatus.getPath();
-        String fileName = filePath.getName();
-        if (fileName.startsWith(name + ".")) {
-          int sIndex = fileName.indexOf('.');
-          int eIndex = fileName.indexOf('.', sIndex + 1);
-          String pattern;
-          if (eIndex < 0) {
-            pattern = fileName.substring(sIndex + 1);
-          } else {
-            pattern = fileName.substring(sIndex + 1, eIndex);
-          }
-          Date date;
-          try {
-            date = formatter.parse(pattern);
-          } catch (ParseException e) {
-            throw new IOException(e);
-          }
-          if (date.getTime() + maxTimeToKeep < now) {
-            fileSystem.delete(filePath, false);
-          }
-        }
-      }
-    }
-
-    private void rollFile(String newOutputFilePattern) throws IOException {
-      if (printStream != null) {
-        printStream.close();
-      }
-      currentOutputPath = new Path(path, name + "." + newOutputFilePattern);
-      if (fileSystem.exists(currentOutputPath)) {
-        // try to append
-        try {
-          outputStream = fileSystem.append(currentOutputPath);
-        } catch (IOException e) {
-          currentOutputPath = new Path(path, name + "." + newOutputFilePattern + "." + now);
-          outputStream = fileSystem.create(currentOutputPath);
-        }
-      } else {
-        outputStream = fileSystem.create(currentOutputPath);
-      }
-      printStream = new PrintWriter(outputStream);
-      currentOutputFilePattern = newOutputFilePattern;
-    }
-
-    public void write(JSONObject jsonObject) throws JSONException {
-      jsonObject.put("timestamp", now);
-      printStream.println(jsonObject.toString());
-    }
-
-    public void flush() throws IOException {
-      printStream.flush();
-      outputStream.flush();
-      outputStream.sync();
-    }
-  }
-
-  public static void enable(Configuration configuration, Path path, String filePattern, String name, long period,
-      TimeUnit unit) throws IOException {
-    enable(Metrics.defaultRegistry(), configuration, path, filePattern, name, period, unit);
-  }
-
-  public static void enable(MetricsRegistry metricsRegistry, Configuration configuration, Path path,
-      String filePattern, String name, long period, TimeUnit unit) throws IOException {
-    final HDFSReporter reporter = new HDFSReporter(metricsRegistry, configuration, path, filePattern, name);
-    reporter.start(period, unit);
-  }
-
-  private final Context context;
-  private final Clock clock;
-
-  public HDFSReporter(Configuration configuration, Path path, String filePattern, String name) throws IOException {
-    this(Metrics.defaultRegistry(), configuration, path, filePattern, name);
-  }
-
-  public HDFSReporter(MetricsRegistry metricsRegistry, Configuration configuration, Path path, String filePattern,
-      String name) throws IOException {
-    this(metricsRegistry, configuration, path, filePattern, name, Clock.defaultClock());
-  }
-
-  public HDFSReporter(MetricsRegistry metricsRegistry, Configuration configuration, Path path, String filePattern,
-      String name, Clock clock) throws IOException {
-    super(metricsRegistry, "hdfs-reporter");
-    this.context = new Context(path, configuration, filePattern, name);
-    this.clock = clock;
-  }
-
-  @Override
-  public void run() {
-    try {
-      context.open(clock.time());
-      for (Entry<String, SortedMap<MetricName, Metric>> entry : getMetricsRegistry().groupedMetrics().entrySet()) {
-        for (Entry<MetricName, Metric> subEntry : entry.getValue().entrySet()) {
-          subEntry.getValue().processWith(this, subEntry.getKey(), context);
-        }
-      }
-      context.flush();
-    } catch (Throwable t) {
-      LOG.error("Unknown error during the processing of metrics.", t);
-    }
-  }
-
-  @Override
-  public void processGauge(MetricName name, Gauge<?> gauge, HDFSReporter.Context context) {
-    JSONObject jsonObject = new JSONObject();
-    try {
-      jsonObject.put("name", getName(name));
-      jsonObject.put("type", "gauge");
-      jsonObject.put("value", gauge.value());
-      context.write(jsonObject);
-    } catch (JSONException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public void processCounter(MetricName name, Counter counter, HDFSReporter.Context context) {
-    JSONObject jsonObject = new JSONObject();
-    try {
-      jsonObject.put("name", getName(name));
-      jsonObject.put("type", "counter");
-      jsonObject.put("value", counter.count());
-      context.write(jsonObject);
-    } catch (JSONException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public void processMeter(MetricName name, Metered meter, HDFSReporter.Context context) {
-    JSONObject jsonObject = new JSONObject();
-    try {
-      jsonObject.put("name", getName(name));
-      jsonObject.put("type", "meter");
-      JSONObject meterJsonObject = new JSONObject();
-
-      addMeterInfo(meter, meterJsonObject);
-
-      jsonObject.put("value", meterJsonObject);
-
-      context.write(jsonObject);
-    } catch (JSONException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  private void addMeterInfo(Metered meter, JSONObject meterJsonObject) throws JSONException {
-    meterJsonObject.put("rateUnit", meter.rateUnit());
-    meterJsonObject.put("eventType", meter.eventType());
-    meterJsonObject.put("count", meter.count());
-    meterJsonObject.put("meanRate", meter.meanRate());
-    meterJsonObject.put("oneMinuteRate", meter.oneMinuteRate());
-    meterJsonObject.put("fiveMinuteRate", meter.fiveMinuteRate());
-    meterJsonObject.put("fifteenMinuteRate", meter.fifteenMinuteRate());
-  }
-
-  @Override
-  public void processHistogram(MetricName name, Histogram histogram, HDFSReporter.Context context) {
-    JSONObject jsonObject = new JSONObject();
-    try {
-      jsonObject.put("name", getName(name));
-      jsonObject.put("type", "meter");
-      JSONObject histogramJsonObject = new JSONObject();
-
-      histogramJsonObject.put("min", histogram.min());
-      histogramJsonObject.put("max", histogram.max());
-      histogramJsonObject.put("mean", histogram.mean());
-      histogramJsonObject.put("stdDev", histogram.stdDev());
-
-      Snapshot snapshot = histogram.getSnapshot();
-      JSONObject snapshotJsonObject = new JSONObject();
-      snapshotJsonObject.put("median", snapshot.getMedian());
-      snapshotJsonObject.put("75%", snapshot.get75thPercentile());
-      snapshotJsonObject.put("95%", snapshot.get95thPercentile());
-      snapshotJsonObject.put("98%", snapshot.get98thPercentile());
-      snapshotJsonObject.put("99%", snapshot.get99thPercentile());
-      snapshotJsonObject.put("99.9%", snapshot.get999thPercentile());
-
-      histogramJsonObject.put("snapshot", snapshotJsonObject);
-
-      jsonObject.put("value", histogramJsonObject);
-      context.write(jsonObject);
-    } catch (JSONException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  @Override
-  public void processTimer(MetricName name, Timer timer, HDFSReporter.Context context) {
-    JSONObject jsonObject = new JSONObject();
-    try {
-      jsonObject.put("name", getName(name));
-      jsonObject.put("type", "meter");
-      JSONObject timerJsonObject = new JSONObject();
-
-      timerJsonObject.put("unit", timer.durationUnit());
-      timerJsonObject.put("min", timer.min());
-      timerJsonObject.put("max", timer.max());
-      timerJsonObject.put("mean", timer.mean());
-      timerJsonObject.put("stdDev", timer.stdDev());
-      addMeterInfo(timer, timerJsonObject);
-
-      Snapshot snapshot = timer.getSnapshot();
-      JSONObject snapshotJsonObject = new JSONObject();
-      snapshotJsonObject.put("median", snapshot.getMedian());
-      snapshotJsonObject.put("75%", snapshot.get75thPercentile());
-      snapshotJsonObject.put("95%", snapshot.get95thPercentile());
-      snapshotJsonObject.put("98%", snapshot.get98thPercentile());
-      snapshotJsonObject.put("99%", snapshot.get99thPercentile());
-      snapshotJsonObject.put("99.9%", snapshot.get999thPercentile());
-
-      timerJsonObject.put("snapshot", snapshotJsonObject);
-
-      jsonObject.put("value", timerJsonObject);
-
-      context.write(jsonObject);
-    } catch (JSONException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  private JSONObject getName(MetricName metricName) throws JSONException {
-    String group = metricName.getGroup();
-    String name = metricName.getName();
-    String scope = metricName.getScope();
-    String type = metricName.getType();
-    JSONObject jsonObject = new JSONObject();
-    jsonObject.put("name", name);
-    jsonObject.put("group", group);
-    jsonObject.put("scope", scope);
-    jsonObject.put("type", type);
-    return jsonObject;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/2f17851d/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index f4cc080..b9b7937 100644
--- a/pom.xml
+++ b/pom.xml
@@ -172,6 +172,8 @@ under the License.
 	</scm>
 
 	<properties>
+                <commons-logging.version>1.1.3</commons-logging.version>
+                <commons-lang.version>2.4</commons-lang.version>
 		<zookeeper.version>3.4.5</zookeeper.version>
 		<log4j.version>1.2.15</log4j.version>
 		<jersey.version>1.14</jersey.version>


Mime
View raw message