kylin-issues mailing list archives

From "ASF GitHub Bot (JIRA)" <j...@apache.org>
Subject [jira] [Commented] (KYLIN-3597) Fix sonar reported static code issues
Date Tue, 09 Oct 2018 01:55:00 GMT

    [ https://issues.apache.org/jira/browse/KYLIN-3597?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16642688#comment-16642688 ]

ASF GitHub Bot commented on KYLIN-3597:
---------------------------------------

shaofengshi closed pull request #282: KYLIN-3597 Close resources after they are used.
URL: https://github.com/apache/kylin/pull/282
 
 
   

This is a PR merged from a forked repository. As GitHub hides the
original diff on merge, it is displayed below for the sake of provenance:

diff --git a/core-common/src/main/java/org/apache/kylin/common/persistence/JDBCResourceDAO.java b/core-common/src/main/java/org/apache/kylin/common/persistence/JDBCResourceDAO.java
index dce0894a39..70a049b8a7 100644
--- a/core-common/src/main/java/org/apache/kylin/common/persistence/JDBCResourceDAO.java
+++ b/core-common/src/main/java/org/apache/kylin/common/persistence/JDBCResourceDAO.java
@@ -442,9 +442,11 @@ public void execute(Connection connection) throws SQLException {
             }
 
             private boolean checkTableExists(final String tableName, final Connection connection) throws SQLException {
-                final PreparedStatement ps = connection.prepareStatement(getCheckTableExistsSql(tableName));
-                final ResultSet rs = ps.executeQuery();
+                PreparedStatement ps = null;
+                ResultSet rs = null;
                 try {
+                    ps = connection.prepareStatement(getCheckTableExistsSql(tableName));
+                    rs = ps.executeQuery();
                     while (rs.next()) {
                         if (tableName.equals(rs.getString(1))) {
                             return true;
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java
index 96690d00de..fd32db5e70 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/SparkCubeHFile.java
@@ -180,58 +180,59 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {
 
             //HBase conf
             logger.info("Loading HBase configuration from:{}", hbaseConfFile);
-            FSDataInputStream confInput = fs.open(new Path(hbaseConfFile));
-
-            Configuration hbaseJobConf = new Configuration();
-            hbaseJobConf.addResource(confInput);
-            hbaseJobConf.set("spark.hadoop.dfs.replication", "3"); // HFile, replication=3
-            Job job = Job.getInstance(hbaseJobConf, cubeSegment.getStorageLocationIdentifier());
-
-            FileOutputFormat.setOutputPath(job, new Path(outputPath));
-
-            JavaPairRDD<Text, Text> inputRDDs = SparkUtil.parseInputPath(inputPath, fs, sc, Text.class, Text.class);
-            final JavaPairRDD<RowKeyWritable, KeyValue> hfilerdd;
-            if (quickPath) {
-                hfilerdd = inputRDDs.mapToPair(new PairFunction<Tuple2<Text, Text>, RowKeyWritable, KeyValue>() {
-                    @Override
-                    public Tuple2<RowKeyWritable, KeyValue> call(Tuple2<Text, Text> textTextTuple2) throws Exception {
-                        KeyValue outputValue = keyValueCreators.get(0).create(textTextTuple2._1,
-                                textTextTuple2._2.getBytes(), 0, textTextTuple2._2.getLength());
-                        return new Tuple2<>(new RowKeyWritable(outputValue.createKeyOnly(false).getKey()), outputValue);
-                    }
-                });
-            } else {
-                hfilerdd = inputRDDs.flatMapToPair(new PairFlatMapFunction<Tuple2<Text, Text>, RowKeyWritable, KeyValue>() {
-                    @Override
-                    public Iterator<Tuple2<RowKeyWritable, KeyValue>> call(Tuple2<Text, Text> textTextTuple2)
-                            throws Exception {
-
-                        List<Tuple2<RowKeyWritable, KeyValue>> result = Lists.newArrayListWithExpectedSize(cfNum);
-                        Object[] inputMeasures = new Object[cubeDesc.getMeasures().size()];
-                        inputCodec.decode(ByteBuffer.wrap(textTextTuple2._2.getBytes(), 0, textTextTuple2._2.getLength()),
-                                inputMeasures);
-
-                        for (int i = 0; i < cfNum; i++) {
-                            KeyValue outputValue = keyValueCreators.get(i).create(textTextTuple2._1, inputMeasures);
-                            result.add(new Tuple2<>(new RowKeyWritable(outputValue.createKeyOnly(false).getKey()),
-                                    outputValue));
-                        }
 
-                        return result.iterator();
-                    }
-                });
-            }
+            try (FSDataInputStream confInput = fs.open(new Path(hbaseConfFile))) {
+                Configuration hbaseJobConf = new Configuration();
+                hbaseJobConf.addResource(confInput);
+                hbaseJobConf.set("spark.hadoop.dfs.replication", "3"); // HFile, replication=3
+                Job job = Job.getInstance(hbaseJobConf, cubeSegment.getStorageLocationIdentifier());
+
+                FileOutputFormat.setOutputPath(job, new Path(outputPath));
 
-            hfilerdd.repartitionAndSortWithinPartitions(new HFilePartitioner(keys),
-                    RowKeyWritable.RowKeyComparator.INSTANCE)
-                    .mapToPair(new PairFunction<Tuple2<RowKeyWritable, KeyValue>, ImmutableBytesWritable, KeyValue>() {
+                JavaPairRDD<Text, Text> inputRDDs = SparkUtil.parseInputPath(inputPath, fs, sc, Text.class, Text.class);
+                final JavaPairRDD<RowKeyWritable, KeyValue> hfilerdd;
+                if (quickPath) {
+                    hfilerdd = inputRDDs.mapToPair(new PairFunction<Tuple2<Text, Text>, RowKeyWritable, KeyValue>() {
                         @Override
-                        public Tuple2<ImmutableBytesWritable, KeyValue> call(
-                                Tuple2<RowKeyWritable, KeyValue> rowKeyWritableKeyValueTuple2) throws Exception {
-                            return new Tuple2<>(new ImmutableBytesWritable(rowKeyWritableKeyValueTuple2._2.getKey()),
-                                    rowKeyWritableKeyValueTuple2._2);
+                        public Tuple2<RowKeyWritable, KeyValue> call(Tuple2<Text, Text> textTextTuple2) throws Exception {
+                            KeyValue outputValue = keyValueCreators.get(0).create(textTextTuple2._1,
+                                    textTextTuple2._2.getBytes(), 0, textTextTuple2._2.getLength());
+                            return new Tuple2<>(new RowKeyWritable(outputValue.createKeyOnly(false).getKey()), outputValue);
                         }
-                    }).saveAsNewAPIHadoopDataset(job.getConfiguration());
+                    });
+                } else {
+                    hfilerdd = inputRDDs.flatMapToPair(new PairFlatMapFunction<Tuple2<Text, Text>, RowKeyWritable, KeyValue>() {
+                        @Override
+                        public Iterator<Tuple2<RowKeyWritable, KeyValue>> call(Tuple2<Text, Text> textTextTuple2)
+                                throws Exception {
+
+                            List<Tuple2<RowKeyWritable, KeyValue>> result = Lists.newArrayListWithExpectedSize(cfNum);
+                            Object[] inputMeasures = new Object[cubeDesc.getMeasures().size()];
+                            inputCodec.decode(ByteBuffer.wrap(textTextTuple2._2.getBytes(), 0, textTextTuple2._2.getLength()),
+                                    inputMeasures);
+
+                            for (int i = 0; i < cfNum; i++) {
+                                KeyValue outputValue = keyValueCreators.get(i).create(textTextTuple2._1, inputMeasures);
+                                result.add(new Tuple2<>(new RowKeyWritable(outputValue.createKeyOnly(false).getKey()),
+                                        outputValue));
+                            }
+
+                            return result.iterator();
+                        }
+                    });
+                }
+
+                hfilerdd.repartitionAndSortWithinPartitions(new HFilePartitioner(keys),
+                        RowKeyWritable.RowKeyComparator.INSTANCE)
+                        .mapToPair(new PairFunction<Tuple2<RowKeyWritable, KeyValue>, ImmutableBytesWritable, KeyValue>() {
+                            @Override
+                            public Tuple2<ImmutableBytesWritable, KeyValue> call(
+                                    Tuple2<RowKeyWritable, KeyValue> rowKeyWritableKeyValueTuple2) throws Exception {
+                                return new Tuple2<>(new ImmutableBytesWritable(rowKeyWritableKeyValueTuple2._2.getKey()),
+                                        rowKeyWritableKeyValueTuple2._2);
+                            }
+                        }).saveAsNewAPIHadoopDataset(job.getConfiguration());
+            }
 
             logger.info("HDFS: Number of bytes written={}", jobListener.metrics.getBytesWritten());
 
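The SparkCubeHFile change above scopes the HDFS config stream with
try-with-resources in the same way. A runnable sketch of just that pattern
(the class and method names here are hypothetical, for illustration only,
not taken from the patch):

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HBaseConfLoader { // hypothetical helper

        static Configuration loadConf(FileSystem fs, String hbaseConfFile) throws IOException {
            Configuration hbaseJobConf = new Configuration();
            // The stream is closed when the block exits, even on exception;
            // the pre-patch code leaked it if a later call threw.
            try (FSDataInputStream confInput = fs.open(new Path(hbaseConfFile))) {
                hbaseJobConf.addResource(confInput);
                // Touch the conf inside the block: Configuration parses added
                // resources lazily, so the stream must still be open here.
                hbaseJobConf.set("dfs.replication", "3");
            }
            return hbaseJobConf;
        }
    }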


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


> Fix sonar reported static code issues
> -------------------------------------
>
>                 Key: KYLIN-3597
>                 URL: https://issues.apache.org/jira/browse/KYLIN-3597
>             Project: Kylin
>          Issue Type: Improvement
>          Components: Others
>            Reporter: Shaofeng SHI
>            Priority: Major
>             Fix For: v2.6.0
>
>




--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
