From: lidong@apache.org
To: commits@kylin.apache.org
Reply-To: dev@kylin.apache.org
Date: Tue, 20 Dec 2016 11:26:10 -0000
In-Reply-To: <623cf4901ba6496a9e1f490dec1d3710@git.apache.org>
References: <623cf4901ba6496a9e1f490dec1d3710@git.apache.org>
Subject: [14/50] [abbrv] kylin git commit: KYLIN-1832 code review

http://git-wip-us.apache.org/repos/asf/kylin/blob/e6e330a8/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
----------------------------------------------------------------------
diff --git a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
index 76212c8..6e894dd 100644
--- a/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
+++ b/engine-spark/src/main/java/org/apache/kylin/engine/spark/SparkCubing.java
@@ -83,7 +83,7 @@ import org.apache.kylin.engine.spark.cube.DefaultTupleConverter;
 import org.apache.kylin.engine.spark.util.IteratorUtils;
 import org.apache.kylin.measure.BufferedMeasureCodec;
 import org.apache.kylin.measure.MeasureAggregators;
-import org.apache.kylin.measure.hllc.HyperLogLogPlusCounterNew;
+import org.apache.kylin.measure.hllc.HLLCounter;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.IJoinedFlatTableDesc;
 import org.apache.kylin.metadata.model.MeasureDesc;
@@ -241,15 +241,15 @@ public class SparkCubing extends AbstractApplication {
         }
     }
 
-    private Map<Long, HyperLogLogPlusCounterNew> sampling(final JavaRDD<List<String>> rowJavaRDD, final String cubeName, String segmentId) throws Exception {
+    private Map<Long, HLLCounter> sampling(final JavaRDD<List<String>> rowJavaRDD, final String cubeName, String segmentId) throws Exception {
         CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).reloadCubeLocal(cubeName);
         CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
         CubeDesc cubeDesc = cubeInstance.getDescriptor();
         CuboidScheduler cuboidScheduler = new CuboidScheduler(cubeDesc);
         List<Long> allCuboidIds = cuboidScheduler.getAllCuboidIds();
-        final HashMap<Long, HyperLogLogPlusCounterNew> zeroValue = Maps.newHashMap();
+        final HashMap<Long, HLLCounter> zeroValue = Maps.newHashMap();
         for (Long id : allCuboidIds) {
-            zeroValue.put(id, new HyperLogLogPlusCounterNew(cubeDesc.getConfig().getCubeStatsHLLPrecision()));
+            zeroValue.put(id, new HLLCounter(cubeDesc.getConfig().getCubeStatsHLLPrecision()));
         }
 
         CubeJoinedFlatTableEnrich flatDesc = new CubeJoinedFlatTableEnrich(EngineFactory.getJoinedFlatTableDesc(cubeSegment), cubeDesc);
@@ -278,12 +278,12 @@ public class SparkCubing extends AbstractApplication {
             row_hashcodes[i] = new ByteArray();
         }
 
-        final HashMap<Long, HyperLogLogPlusCounterNew> samplingResult = rowJavaRDD.aggregate(zeroValue, new Function2<HashMap<Long, HyperLogLogPlusCounterNew>, List<String>, HashMap<Long, HyperLogLogPlusCounterNew>>() {
+        final HashMap<Long, HLLCounter> samplingResult = rowJavaRDD.aggregate(zeroValue, new Function2<HashMap<Long, HLLCounter>, List<String>, HashMap<Long, HLLCounter>>() {
 
             final HashFunction hashFunction = Hashing.murmur3_128();
 
             @Override
-            public HashMap<Long, HyperLogLogPlusCounterNew> call(HashMap<Long, HyperLogLogPlusCounterNew> v1, List<String> v2) throws Exception {
+            public HashMap<Long, HLLCounter> call(HashMap<Long, HLLCounter> v1, List<String> v2) throws Exception {
                 for (int i = 0; i < nRowKey; i++) {
                     Hasher hc = hashFunction.newHasher();
                     String colValue = v2.get(rowKeyColumnIndexes[i]);
@@ -296,7 +296,7 @@ public class SparkCubing extends AbstractApplication {
 
                 for (Map.Entry<Long, Integer[]> entry : allCuboidsBitSet.entrySet()) {
                     Hasher hc = hashFunction.newHasher();
-                    HyperLogLogPlusCounterNew counter = v1.get(entry.getKey());
+                    HLLCounter counter = v1.get(entry.getKey());
                     final Integer[] cuboidBitSet = entry.getValue();
                     for (int position = 0; position < cuboidBitSet.length; position++) {
                         hc.putBytes(row_hashcodes[cuboidBitSet[position]].array());
@@ -305,14 +305,14 @@ public class SparkCubing extends AbstractApplication {
                 }
                 return v1;
             }
-        }, new Function2<HashMap<Long, HyperLogLogPlusCounterNew>, HashMap<Long, HyperLogLogPlusCounterNew>, HashMap<Long, HyperLogLogPlusCounterNew>>() {
+        }, new Function2<HashMap<Long, HLLCounter>, HashMap<Long, HLLCounter>, HashMap<Long, HLLCounter>>() {
             @Override
-            public HashMap<Long, HyperLogLogPlusCounterNew> call(HashMap<Long, HyperLogLogPlusCounterNew> v1, HashMap<Long, HyperLogLogPlusCounterNew> v2) throws Exception {
+            public HashMap<Long, HLLCounter> call(HashMap<Long, HLLCounter> v1, HashMap<Long, HLLCounter> v2) throws Exception {
                 Preconditions.checkArgument(v1.size() == v2.size());
                 Preconditions.checkArgument(v1.size() > 0);
-                for (Map.Entry<Long, HyperLogLogPlusCounterNew> entry : v1.entrySet()) {
-                    final HyperLogLogPlusCounterNew counter1 = entry.getValue();
-                    final HyperLogLogPlusCounterNew counter2 = v2.get(entry.getKey());
+                for (Map.Entry<Long, HLLCounter> entry : v1.entrySet()) {
+                    final HLLCounter counter1 = entry.getValue();
+                    final HLLCounter counter2 = v2.get(entry.getKey());
                     counter1.merge(Preconditions.checkNotNull(counter2, "counter cannot be null"));
                 }
                 return v1;
@@ -470,7 +470,7 @@ public class SparkCubing extends AbstractApplication {
         ClassUtil.addClasspath(confPath);
     }
 
-    private byte[][] createHTable(String cubeName, String segmentId, Map<Long, HyperLogLogPlusCounterNew> samplingResult) throws Exception {
+    private byte[][] createHTable(String cubeName, String segmentId, Map<Long, HLLCounter> samplingResult) throws Exception {
         final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
         final CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);
         final CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
@@ -614,7 +614,7 @@ public class SparkCubing extends AbstractApplication {
             }
         });
 
-        final Map<Long, HyperLogLogPlusCounterNew> samplingResult = sampling(rowJavaRDD, cubeName, segmentId);
+        final Map<Long, HLLCounter> samplingResult = sampling(rowJavaRDD, cubeName, segmentId);
         final byte[][] splitKeys = createHTable(cubeName, segmentId, samplingResult);
 
         final String hfile = build(rowJavaRDD, cubeName, segmentId, splitKeys);
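The aggregate() call above folds every row into one HLL counter per cuboid
(the first Function2, the seqOp) and then merges per-partition counter maps
(the second Function2, the combOp). The following self-contained sketch shows
the same shape in plain Java; it is illustrative rather than Kylin source, and
the precision of 14 and the whole-row add() are simplifying assumptions (the
real seqOp hashes only the columns in each cuboid's bit set).

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.kylin.measure.hllc.HLLCounter;

public class HllAggregateSketch {

    // seqOp: fold one row into the per-cuboid counters.
    static Map<Long, HLLCounter> addRow(Map<Long, HLLCounter> acc, List<String> row) {
        for (Map.Entry<Long, HLLCounter> e : acc.entrySet()) {
            // Simplification: add the whole joined row; Kylin hashes only
            // the columns belonging to cuboid e.getKey().
            e.getValue().add(String.join(",", row));
        }
        return acc;
    }

    // combOp: merge two partial results, mirroring counter1.merge(counter2) in the diff.
    static Map<Long, HLLCounter> merge(Map<Long, HLLCounter> v1, Map<Long, HLLCounter> v2) {
        for (Map.Entry<Long, HLLCounter> e : v1.entrySet()) {
            e.getValue().merge(v2.get(e.getKey()));
        }
        return v1;
    }

    public static void main(String[] args) {
        // Two "partitions", each with one counter for a hypothetical cuboid id 7.
        Map<Long, HLLCounter> a = new HashMap<>();
        Map<Long, HLLCounter> b = new HashMap<>();
        a.put(7L, new HLLCounter(14)); // precision 14 is an assumption
        b.put(7L, new HLLCounter(14));
        addRow(a, List.of("2016-12-20", "CN"));
        addRow(b, List.of("2016-12-19", "US"));
        merge(a, b);
        System.out.println(a.get(7L).getCountEstimate()); // ~2 distinct rows
    }
}

Because HLL merge is commutative and associative, Spark is free to combine the
per-partition maps in any order without changing the estimate.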
http://git-wip-us.apache.org/repos/asf/kylin/blob/e6e330a8/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityMapper.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityMapper.java b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityMapper.java
index 230249f..f046f78 100644
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityMapper.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityMapper.java
@@ -35,7 +35,7 @@ import org.apache.kylin.engine.mr.MRUtil;
 import org.apache.kylin.engine.mr.common.AbstractHadoopJob;
 import org.apache.kylin.engine.mr.common.BatchConstants;
 import org.apache.kylin.measure.BufferedMeasureCodec;
-import org.apache.kylin.measure.hllc.HyperLogLogPlusCounterNew;
+import org.apache.kylin.measure.hllc.HLLCounter;
 import org.apache.kylin.metadata.MetadataManager;
 import org.apache.kylin.metadata.model.ColumnDesc;
 import org.apache.kylin.metadata.model.TableDesc;
@@ -46,7 +46,7 @@ import org.apache.kylin.metadata.model.TableDesc;
  */
 public class ColumnCardinalityMapper<T> extends KylinMapper<T, HCatRecord, IntWritable, BytesWritable> {
 
-    private Map<Integer, HyperLogLogPlusCounterNew> hllcMap = new HashMap<Integer, HyperLogLogPlusCounterNew>();
+    private Map<Integer, HLLCounter> hllcMap = new HashMap<Integer, HLLCounter>();
     public static final String DEFAULT_DELIM = ",";
 
     private int counter = 0;
@@ -87,9 +87,9 @@ public class ColumnCardinalityMapper<T> extends KylinMapper<T, HCatRecord, IntWritable, BytesWritable> {
         }
     }
 
-    private HyperLogLogPlusCounterNew getHllc(Integer key) {
+    private HLLCounter getHllc(Integer key) {
         if (!hllcMap.containsKey(key)) {
-            hllcMap.put(key, new HyperLogLogPlusCounterNew());
+            hllcMap.put(key, new HLLCounter());
         }
         return hllcMap.get(key);
    }
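The getHllc helper changed above lazily creates one counter per column index,
so the mapper can estimate every column's cardinality in a single pass over
the table. A minimal standalone version of that pattern (illustrative, not
Kylin source; rows are fed in as plain string arrays):

import java.util.HashMap;
import java.util.Map;

import org.apache.kylin.measure.hllc.HLLCounter;

public class ColumnCardinalitySketch {

    private final Map<Integer, HLLCounter> hllcMap = new HashMap<Integer, HLLCounter>();

    // Same shape as the getHllc(Integer) helper in the diff: create on first use.
    private HLLCounter getHllc(Integer key) {
        if (!hllcMap.containsKey(key)) {
            hllcMap.put(key, new HLLCounter());
        }
        return hllcMap.get(key);
    }

    // Feed one row: each column value goes into that column's counter.
    public void addRow(String[] values) {
        for (int i = 0; i < values.length; i++) {
            getHllc(i).add(values[i]);
        }
    }

    public static void main(String[] args) {
        ColumnCardinalitySketch s = new ColumnCardinalitySketch();
        s.addRow(new String[] { "2016-12-20", "CN" });
        s.addRow(new String[] { "2016-12-20", "US" });
        System.out.println(s.getHllc(0).getCountEstimate()); // ~1 distinct date
        System.out.println(s.getHllc(1).getCountEstimate()); // ~2 distinct countries
    }
}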
http://git-wip-us.apache.org/repos/asf/kylin/blob/e6e330a8/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducer.java
----------------------------------------------------------------------
diff --git a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducer.java b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducer.java
--- a/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducer.java
+++ b/source-hive/src/main/java/org/apache/kylin/source/hive/cardinality/ColumnCardinalityReducer.java
 public class ColumnCardinalityReducer extends KylinReducer<IntWritable, BytesWritable, IntWritable, LongWritable> {
 
     public static final int ONE = 1;
-    private Map<Integer, HyperLogLogPlusCounterNew> hllcMap = new HashMap<Integer, HyperLogLogPlusCounterNew>();
+    private Map<Integer, HLLCounter> hllcMap = new HashMap<Integer, HLLCounter>();
 
     @Override
     protected void setup(Context context) throws IOException {
@@ -53,16 +53,16 @@ public class ColumnCardinalityReducer extends KylinReducer<IntWritable, BytesWritable, IntWritable, LongWritable> {
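The mapper and reducer above exchange counters as serialized register arrays
(hence the BufferedMeasureCodec import in both files). A round-trip sketch of
that hand-off, assuming HLLCounter keeps the writeRegisters/readRegisters
ByteBuffer pair of the class it replaces (illustrative, not Kylin source):

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.kylin.measure.BufferedMeasureCodec;
import org.apache.kylin.measure.hllc.HLLCounter;

public class HllSerdeSketch {
    public static void main(String[] args) throws IOException {
        HLLCounter out = new HLLCounter();
        out.add("row-1");
        out.add("row-2");

        // Serialize, as the mapper's cleanup does before context.write().
        ByteBuffer buf = ByteBuffer.allocate(BufferedMeasureCodec.DEFAULT_BUFFER_SIZE);
        out.writeRegisters(buf);
        buf.flip();

        // Deserialize on the reducer side and read the estimate.
        HLLCounter in = new HLLCounter();
        in.readRegisters(buf);
        System.out.println(in.getCountEstimate()); // ~2
    }
}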