From: jcamacho@apache.org
To: commits@hive.apache.org
Reply-To: hive-dev@hive.apache.org
Message-Id: <712f31e6d9354c8f95a8cbd02cabcb23@git.apache.org>
X-Mailer: ASF-Git Admin Mailer
Subject: hive git commit: HIVE-17468: Shade and package appropriate jackson version for druid storage handler (Slim Bouguerra, reviewed by Jesus Camacho Rodriguez)
Date: Fri, 8 Sep 2017 16:08:43 +0000 (UTC)

Repository: hive
Updated Branches:
  refs/heads/master 09afd83dd -> 5f26f393e


HIVE-17468: Shade and package appropriate jackson version for druid storage handler (Slim Bouguerra, reviewed by Jesus Camacho Rodriguez)

Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5f26f393
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5f26f393
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5f26f393

Branch: refs/heads/master
Commit: 5f26f393ecc0cca4afd3a2ed3c1d46acf47701d1
Parents: 09afd83
Author: Slim Bouguerra
Authored: Fri Sep 8 09:08:29 2017 -0700
Committer: Jesus Camacho Rodriguez
Committed: Fri Sep 8 09:08:29 2017 -0700

----------------------------------------------------------------------
 druid-handler/pom.xml                           | 48 +++++++++----------
 .../hive/druid/DruidStorageHandlerUtils.java    |  7 +++
 .../hadoop/hive/druid/io/DruidOutputFormat.java |  9 ++--
 .../druid/io/DruidQueryBasedInputFormat.java    |  8 +---
 .../hadoop/hive/druid/io/DruidRecordWriter.java |  3 +-
 .../serde/DruidGroupByQueryRecordReader.java    |  5 +-
 .../serde/DruidSelectQueryRecordReader.java     |  5 +-
 .../hadoop/hive/druid/serde/DruidSerDe.java     | 15 +++--
 .../serde/DruidTimeseriesQueryRecordReader.java |  5 +-
 .../druid/serde/DruidTopNQueryRecordReader.java |  5 +-
 .../TestHiveDruidQueryBasedInputFormat.java     |  6 +--
 .../hive/ql/io/TestDruidRecordWriter.java       | 19 +++++---
 12 files changed, 66 insertions(+), 69 deletions(-)
----------------------------------------------------------------------
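The pom.xml hunks below rearrange the com.fasterxml.jackson.core entries and drop the org.apache.calcite / avatica ones so that the handler bundles a single, known Jackson version. The relocation that performs the actual shading is not visible in the surviving hunks; purely as an illustration (the plugin configuration and the shaded package prefix are assumptions, not taken from this pom.xml), a Maven shade relocation for Jackson generally looks like this:

  <!-- Illustrative sketch only; the shaded package prefix is an assumption,
       not the handler's actual value. -->
  <plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-shade-plugin</artifactId>
    <executions>
      <execution>
        <phase>package</phase>
        <goals>
          <goal>shade</goal>
        </goals>
        <configuration>
          <relocations>
            <relocation>
              <pattern>com.fasterxml.jackson</pattern>
              <shadedPattern>org.apache.hive.druid.com.fasterxml.jackson</shadedPattern>
            </relocation>
          </relocations>
        </configuration>
      </execution>
    </executions>
  </plugin>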
http://git-wip-us.apache.org/repos/asf/hive/blob/5f26f393/druid-handler/pom.xml
----------------------------------------------------------------------
diff --git a/druid-handler/pom.xml b/druid-handler/pom.xml
index 81c744f..48b2af9 100644
--- a/druid-handler/pom.xml
+++ b/druid-handler/pom.xml
@@ -53,6 +53,18 @@
       com.google.guava
       guava
+
+      com.fasterxml.jackson.core
+      jackson-core
+
+
+      com.fasterxml.jackson.core
+      jackson-annotations
+
+
+      com.fasterxml.jackson.core
+      jackson-databind
+
@@ -119,18 +131,6 @@
       ${druid.version}
-      com.fasterxml.jackson.core
-      jackson-core
-
-
-      com.fasterxml.jackson.core
-      jackson-annotations
-
-
-      com.fasterxml.jackson.core
-      jackson-databind
-
-
       com.google.code.findbugs
       annotations
@@ -216,24 +216,20 @@
       com.google.guava
       guava
-
-
-
-
-      org.apache.calcite
-      calcite-druid
-      ${calcite.version}
-
-
-      org.apache.calcite.avatica
-      avatica-core
+      com.fasterxml.jackson.core
+      jackson-core
+
+
+      com.fasterxml.jackson.core
+      jackson-annotations
+
+
+      com.fasterxml.jackson.core
+      jackson-databind
-
-
-      org.apache.calcite.avatica
-      avatica
-      ${avatica.version}
-
       junit

http://git-wip-us.apache.org/repos/asf/hive/blob/5f26f393/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
index 3eeb0c3..7169140 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
@@ -41,6 +41,7 @@ import io.druid.timeline.partition.NumberedShardSpec;
 import io.druid.timeline.partition.PartitionChunk;
 import io.druid.timeline.partition.ShardSpec;
+import org.apache.calcite.adapter.druid.LocalInterval;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -77,6 +78,7 @@ import org.jboss.netty.handler.codec.http.HttpHeaders;
 import org.jboss.netty.handler.codec.http.HttpMethod;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
+import org.joda.time.chrono.ISOChronology;
 import org.joda.time.format.ISODateTimeFormat;
 import org.skife.jdbi.v2.FoldController;
 import org.skife.jdbi.v2.Folder3;
@@ -122,7 +124,12 @@ public final class DruidStorageHandlerUtils {
   private static final Logger LOG = LoggerFactory.getLogger(DruidStorageHandlerUtils.class);

   private static final String SMILE_CONTENT_TYPE = "application/x-jackson-smile";
+  public static final String DEFAULT_TIMESTAMP_COLUMN = "__time";
+  public static final Interval DEFAULT_INTERVAL = new Interval(
+          new DateTime("1900-01-01", ISOChronology.getInstanceUTC()),
+          new DateTime("3000-01-01", ISOChronology.getInstanceUTC())
+  ).withChronology(ISOChronology.getInstanceUTC());

   /**
    * Mapper to use to serialize/deserialize Druid objects (JSON)
   */
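The two constants added above take over from org.apache.calcite.adapter.druid.DruidTable.DEFAULT_TIMESTAMP_COLUMN and DruidTable.DEFAULT_INTERVAL, so the handler no longer reaches into Calcite (and Calcite's Jackson) for them. A minimal sketch of the resulting call sites, mirroring the DruidOutputFormat and DruidQueryBasedInputFormat hunks below (the wrapper class is hypothetical, not part of the patch):

  // Minimal sketch, not part of the patch: handler-local constants replace DruidTable.*.
  import io.druid.data.input.impl.TimestampSpec;
  import org.apache.hadoop.hive.druid.DruidStorageHandlerUtils;
  import org.joda.time.Interval;
  import org.joda.time.chrono.ISOChronology;

  public class HandlerConstantsExample {
    // Same TimestampSpec that DruidOutputFormat now builds for ingestion.
    public static TimestampSpec defaultTimestampSpec() {
      return new TimestampSpec(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN, "auto", null);
    }

    // Same "is this the default, unconstrained interval?" check that
    // DruidQueryBasedInputFormat performs before issuing a time-boundary query.
    public static boolean isDefaultInterval(Interval interval) {
      return interval.withChronology(ISOChronology.getInstanceUTC())
          .equals(DruidStorageHandlerUtils.DEFAULT_INTERVAL);
    }
  }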
http://git-wip-us.apache.org/repos/asf/hive/blob/5f26f393/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java
index 9d2ec82..8156231 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java
@@ -41,7 +41,6 @@ import io.druid.segment.indexing.granularity.GranularitySpec;
 import io.druid.segment.indexing.granularity.UniformGranularitySpec;
 import io.druid.segment.realtime.plumber.CustomVersioningPolicy;
-import org.apache.calcite.adapter.druid.DruidTable;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -117,8 +116,8 @@ public class DruidOutputFormat implements HiveOutputFormat
     final List<AggregatorFactory> aggregatorFactories = aggregatorFactoryBuilder.build();
     final InputRowParser inputRowParser = new MapInputRowParser(new TimeAndDimsParseSpec(
-            new TimestampSpec(DruidTable.DEFAULT_TIMESTAMP_COLUMN, "auto", null),
+            new TimestampSpec(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN, "auto", null),
             new DimensionsSpec(dimensions,
                     Lists.newArrayList(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME),
                     null
             )

http://git-wip-us.apache.org/repos/asf/hive/blob/5f26f393/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java
index 2f53616..bcabbd6 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidQueryBasedInputFormat.java
@@ -27,7 +27,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.apache.calcite.adapter.druid.DruidDateTimeUtils;
 import org.apache.calcite.adapter.druid.DruidTable;
 import org.apache.commons.lang3.StringEscapeUtils;
 import org.apache.commons.lang3.StringUtils;
@@ -56,7 +55,6 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.jboss.netty.handler.codec.http.HttpMethod;
 import org.joda.time.Interval;
-import org.joda.time.Period;
 import org.joda.time.chrono.ISOChronology;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -65,10 +63,6 @@ import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.google.common.collect.Lists;
-import com.metamx.common.lifecycle.Lifecycle;
-import com.metamx.http.client.HttpClient;
-import com.metamx.http.client.HttpClientConfig;
-import com.metamx.http.client.HttpClientInit;
 import com.metamx.http.client.Request;

 import io.druid.query.BaseQuery;
@@ -308,7 +302,7 @@ public class DruidQueryBasedInputFormat extends InputFormat
     List<Interval> intervals = new ArrayList<>();
     if (query.getIntervals().size() == 1 && query.getIntervals().get(0).withChronology(
-        ISOChronology.getInstanceUTC()).equals(DruidTable.DEFAULT_INTERVAL)) {
+        ISOChronology.getInstanceUTC()).equals(DruidStorageHandlerUtils.DEFAULT_INTERVAL)) {
       // Default max and min, we should execute a time boundary query to get a
       // more precise range
       TimeBoundaryQueryBuilder timeBuilder = new Druids.TimeBoundaryQueryBuilder();
http://git-wip-us.apache.org/repos/asf/hive/blob/5f26f393/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java
index e97f588..cf4dad6 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java
@@ -41,7 +41,6 @@ import io.druid.segment.realtime.appenderator.SegmentsAndMetadata;
 import io.druid.segment.realtime.plumber.Committers;
 import io.druid.timeline.DataSegment;
 import io.druid.timeline.partition.LinearShardSpec;
-import org.apache.calcite.adapter.druid.DruidTable;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -239,7 +238,7 @@ public class DruidRecordWriter implements RecordWriter

     columnInfo : schemaInfo.getColumns().entrySet()) {
-      if (columnInfo.getKey().equals(DruidTable.DEFAULT_TIMESTAMP_COLUMN)) {
+      if (columnInfo.getKey().equals(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN)) {
         // Special handling for timestamp column
         columnNames.add(columnInfo.getKey()); // field name
         PrimitiveTypeInfo type = TypeInfoFactory.timestampTypeInfo; // field type
@@ -308,7 +307,7 @@ public class DruidSerDe extends AbstractSerDe {
     List columnNames, List columnTypes, Map mapColumnNamesTypes) {
     // Timestamp column
-    columnNames.add(DruidTable.DEFAULT_TIMESTAMP_COLUMN);
+    columnNames.add(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN);
     columnTypes.add(TypeInfoFactory.timestampTypeInfo);
     // Aggregator columns
     for (AggregatorFactory af : query.getAggregatorSpecs()) {
@@ -336,7 +335,7 @@ public class DruidSerDe extends AbstractSerDe {
     List columnNames, List columnTypes, Map mapColumnNamesTypes) {
     // Timestamp column
-    columnNames.add(DruidTable.DEFAULT_TIMESTAMP_COLUMN);
+    columnNames.add(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN);
     columnTypes.add(TypeInfoFactory.timestampTypeInfo);
     // Dimension column
     columnNames.add(query.getDimensionSpec().getOutputName());
@@ -368,7 +367,7 @@ public class DruidSerDe extends AbstractSerDe {
     String address, Map mapColumnNamesTypes) throws SerDeException {
     // Timestamp column
-    columnNames.add(DruidTable.DEFAULT_TIMESTAMP_COLUMN);
+    columnNames.add(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN);
     columnTypes.add(TypeInfoFactory.timestampTypeInfo);
     // Dimension columns
     for (DimensionSpec ds : query.getDimensions()) {
@@ -410,7 +409,7 @@ public class DruidSerDe extends AbstractSerDe {
     List columnNames, List columnTypes, Map mapColumnNamesTypes) {
     // Timestamp column
-    columnNames.add(DruidTable.DEFAULT_TIMESTAMP_COLUMN);
+    columnNames.add(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN);
     columnTypes.add(TypeInfoFactory.timestampTypeInfo);
     // Dimension columns
     for (DimensionSpec ds : query.getDimensions()) {
http://git-wip-us.apache.org/repos/asf/hive/blob/5f26f393/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTimeseriesQueryRecordReader.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTimeseriesQueryRecordReader.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTimeseriesQueryRecordReader.java
index 8c2fb10..a1c8488 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTimeseriesQueryRecordReader.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTimeseriesQueryRecordReader.java
@@ -21,7 +21,6 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.util.List;
-import org.apache.calcite.adapter.druid.DruidTable;
 import org.apache.hadoop.hive.druid.DruidStorageHandlerUtils;
 import org.apache.hadoop.io.NullWritable;
@@ -71,7 +70,7 @@ public class DruidTimeseriesQueryRecordReader
   public DruidWritable getCurrentValue() throws IOException, InterruptedException {
     // Create new value
     DruidWritable value = new DruidWritable();
-    value.getValue().put(DruidTable.DEFAULT_TIMESTAMP_COLUMN, current.getTimestamp().getMillis());
+    value.getValue().put(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN, current.getTimestamp().getMillis());
     value.getValue().putAll(current.getValue().getBaseObject());
     return value;
   }
@@ -81,7 +80,7 @@
     if (nextKeyValue()) {
       // Update value
       value.getValue().clear();
-      value.getValue().put(DruidTable.DEFAULT_TIMESTAMP_COLUMN, current.getTimestamp().getMillis());
+      value.getValue().put(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN, current.getTimestamp().getMillis());
       value.getValue().putAll(current.getValue().getBaseObject());
       return true;
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/5f26f393/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTopNQueryRecordReader.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTopNQueryRecordReader.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTopNQueryRecordReader.java
index d431925..afdf670 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTopNQueryRecordReader.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/DruidTopNQueryRecordReader.java
@@ -22,7 +22,6 @@ import java.io.InputStream;
 import java.util.Iterator;
 import java.util.List;
-import org.apache.calcite.adapter.druid.DruidTable;
 import org.apache.hadoop.hive.druid.DruidStorageHandlerUtils;
 import org.apache.hadoop.io.NullWritable;
@@ -80,7 +79,7 @@ public class DruidTopNQueryRecordReader
   public DruidWritable getCurrentValue() throws IOException, InterruptedException {
     // Create new value
     DruidWritable value = new DruidWritable();
-    value.getValue().put(DruidTable.DEFAULT_TIMESTAMP_COLUMN, current.getTimestamp().getMillis());
+    value.getValue().put(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN, current.getTimestamp().getMillis());
     if (values.hasNext()) {
       value.getValue().putAll(values.next().getBaseObject());
       return value;
@@ -93,7 +92,7 @@
     if (nextKeyValue()) {
       // Update value
       value.getValue().clear();
-      value.getValue().put(DruidTable.DEFAULT_TIMESTAMP_COLUMN, current.getTimestamp().getMillis());
+      value.getValue().put(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN, current.getTimestamp().getMillis());
       if (values.hasNext()) {
         value.getValue().putAll(values.next().getBaseObject());
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/5f26f393/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
index 2aeb279..fb15830 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
@@ -139,7 +139,7 @@ public class TestHiveDruidQueryBasedInputFormat extends TestCase {
   private static final String TIMESERIES_QUERY =
       "{ \"queryType\": \"timeseries\", "
           + " \"dataSource\": \"sample_datasource\", "
-          + " \"granularity\": \"day\", "
+          + " \"granularity\": \"DAY\", "
           + " \"descending\": \"true\", "
           + " \"intervals\": [ \"2012-01-01T00:00:00.000/2012-01-03T00:00:00.000\" ]}";

   private static final String TIMESERIES_QUERY_SPLIT =
@@ -149,7 +149,7 @@ public class TestHiveDruidQueryBasedInputFormat extends TestCase {
           + "\"descending\":true,"
           + "\"virtualColumns\":[],"
           + "\"filter\":null,"
-          + "\"granularity\":{\"type\":\"period\",\"period\":\"P1D\",\"timeZone\":\"America/Los_Angeles\",\"origin\":null},"
+          + "\"granularity\":\"DAY\","
           + "\"aggregations\":[],"
           + "\"postAggregations\":[],"
           + "\"context\":null}, [localhost:8082]}]";

@@ -213,7 +213,7 @@ public class TestHiveDruidQueryBasedInputFormat extends TestCase {
           + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2012-01-01T00:00:00.000-08:00/2012-01-03T00:00:00.000-08:00\"]},"
           + "\"virtualColumns\":[],"
           + "\"filter\":null,"
-          + "\"granularity\":{\"type\":\"period\",\"period\":\"P1D\",\"timeZone\":\"America/Los_Angeles\",\"origin\":null},"
+          + "\"granularity\":\"DAY\","
           + "\"dimensions\":[{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"country\",\"outputName\":\"country\",\"outputType\":\"STRING\"},"
           + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"device\",\"outputName\":\"device\",\"outputType\":\"STRING\"}],"
           + "\"aggregations\":[{\"type\":\"longSum\",\"name\":\"total_usage\",\"fieldName\":\"user_count\",\"expression\":null},"
http://git-wip-us.apache.org/repos/asf/hive/blob/5f26f393/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java b/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java
index 4962e0b..af75bfb 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java
@@ -87,21 +87,21 @@ public class TestDruidRecordWriter {
   final List<Map<String, Object>> expectedRows = ImmutableList.of(
       ImmutableMap.of(
-          DruidTable.DEFAULT_TIMESTAMP_COLUMN,
+          DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN,
           DateTime.parse("2014-10-22T00:00:00.000Z").getMillis(),
           "host", ImmutableList.of("a.example.com"),
           "visited_sum", 190L,
           "unique_hosts", 1.0d
       ),
       ImmutableMap.of(
-          DruidTable.DEFAULT_TIMESTAMP_COLUMN,
+          DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN,
           DateTime.parse("2014-10-22T01:00:00.000Z").getMillis(),
           "host", ImmutableList.of("b.example.com"),
           "visited_sum", 175L,
           "unique_hosts", 1.0d
       ),
       ImmutableMap.of(
-          DruidTable.DEFAULT_TIMESTAMP_COLUMN,
+          DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN,
           DateTime.parse("2014-10-22T02:00:00.000Z").getMillis(),
           "host", ImmutableList.of("c.example.com"),
           "visited_sum", 270L,
@@ -109,6 +109,13 @@ public class TestDruidRecordWriter {
       )
   );

+
+  @Test
+  public void testTimeStampColumnName() {
+    Assert.assertEquals("Time column name need to match to ensure serdeser compatibility",
+        DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN, DruidTable.DEFAULT_TIMESTAMP_COLUMN
+    );
+  }
   // This test fails due to conflict of guava classes with hive-exec jar.
   @Ignore @Test
@@ -120,7 +127,7 @@ public class TestDruidRecordWriter {
     Configuration config = new Configuration();
     final InputRowParser inputRowParser = new MapInputRowParser(new TimeAndDimsParseSpec(
-            new TimestampSpec(DruidTable.DEFAULT_TIMESTAMP_COLUMN, "auto", null),
+            new TimestampSpec(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN, "auto", null),
             new DimensionsSpec(ImmutableList.of(new StringDimensionSchema("host")),
                     null, null
             )
@@ -169,7 +176,7 @@ public class TestDruidRecordWriter {
             .put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME,
                     Granularities.DAY.bucketStart(
                             new DateTime((long) input
-                                    .get(DruidTable.DEFAULT_TIMESTAMP_COLUMN)))
+                                    .get(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN)))
                             .getMillis()
             ).build());
   }
@@ -217,7 +224,7 @@ public class TestDruidRecordWriter {
     Assert.assertEquals(ImmutableList.of("host"), actual.getDimensions());

-    Assert.assertEquals(expected.get(DruidTable.DEFAULT_TIMESTAMP_COLUMN),
+    Assert.assertEquals(expected.get(DruidStorageHandlerUtils.DEFAULT_TIMESTAMP_COLUMN),
        actual.getTimestamp().getMillis()
     );
     Assert.assertEquals(expected.get("host"), actual.getDimension("host"));
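The granularity change in the TestHiveDruidQueryBasedInputFormat fixtures above replaces the period-based object ({"type":"period","period":"P1D",...}) with the plain string "DAY", which is how a simple day granularity serializes once queries round-trip through the handler's own bundled Jackson. A rough sketch of that round trip (JSON_MAPPER is assumed to be the handler's shared Druid-aware ObjectMapper; the class is illustrative, not part of the patch):

  // Rough sketch only: deserialize a timeseries query and write it back out,
  // which is what produces the "granularity":"DAY" form expected by the fixtures.
  import java.io.IOException;
  import io.druid.query.Query;
  import org.apache.hadoop.hive.druid.DruidStorageHandlerUtils;

  public class QueryRoundTripSketch {
    public static String roundTrip(String queryJson) throws IOException {
      Query query = DruidStorageHandlerUtils.JSON_MAPPER.readValue(queryJson, Query.class);
      return DruidStorageHandlerUtils.JSON_MAPPER.writeValueAsString(query);
    }
  }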