phoenix-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From sama...@apache.org
Subject [30/50] [abbrv] phoenix git commit: PHOENIX-3661 Make phoenix tool select file system dynamically (Yishan Yang)
Date Tue, 14 Feb 2017 23:42:19 GMT
PHOENIX-3661 Make phoenix tool select file system dynamically (Yishan Yang)


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/f48aa81a
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/f48aa81a
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/f48aa81a

Branch: refs/heads/encodecolumns2
Commit: f48aa81a02f5e8830dc821d23618f579453ab733
Parents: 234e427
Author: Andrew Purtell <apurtell@apache.org>
Authored: Mon Feb 13 15:24:01 2017 -0800
Committer: Andrew Purtell <apurtell@apache.org>
Committed: Mon Feb 13 15:25:37 2017 -0800

----------------------------------------------------------------------
 .../apache/phoenix/mapreduce/AbstractBulkLoadTool.java  |  2 +-
 .../phoenix/mapreduce/MultiHfileOutputFormat.java       |  2 +-
 .../org/apache/phoenix/mapreduce/index/IndexTool.java   | 12 ++++++++----
 3 files changed, 10 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/f48aa81a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/AbstractBulkLoadTool.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/AbstractBulkLoadTool.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/AbstractBulkLoadTool.java
index f7b7d22..9cb54ef 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/AbstractBulkLoadTool.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/AbstractBulkLoadTool.java
@@ -328,7 +328,7 @@ public abstract class AbstractBulkLoadTool extends Configured implements Tool {
             LOG.info("Loading HFiles from {}", outputPath);
             completebulkload(conf,outputPath,tablesToBeLoaded);
             LOG.info("Removing output directory {}", outputPath);
-            if(!FileSystem.get(conf).delete(outputPath, true)) {
+            if(!outputPath.getFileSystem(conf).delete(outputPath, true)) {
                 LOG.error("Failed to delete the output directory {}", outputPath);
             }
             return 0;

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f48aa81a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/MultiHfileOutputFormat.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/MultiHfileOutputFormat.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/MultiHfileOutputFormat.java
index f48a690..9c19a52 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/MultiHfileOutputFormat.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/MultiHfileOutputFormat.java
@@ -454,8 +454,8 @@ public class MultiHfileOutputFormat extends FileOutputFormat<TableRowkeyPair, Ce
         
         Configuration conf = job.getConfiguration();
         // create the partitions file
-        FileSystem fs = FileSystem.get(conf);
         Path partitionsPath = new Path(conf.get("hadoop.tmp.dir"), "partitions_" + UUID.randomUUID());
+        FileSystem fs = partitionsPath.getFileSystem(conf);
         fs.makeQualified(partitionsPath);
         writePartitions(conf, partitionsPath, tablesStartKeys);
         fs.deleteOnExit(partitionsPath);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/f48aa81a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
----------------------------------------------------------------------
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
index 3349cf3..cb649d1 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/index/IndexTool.java
@@ -194,6 +194,7 @@ public class IndexTool extends Configured implements Tool {
         Connection connection;
         Configuration configuration;
         private Path outputPath;
+        private FileSystem fs;
 
         public JobFactory(Connection connection, Configuration configuration, Path outputPath) {
             this.connection = connection;
@@ -357,8 +358,9 @@ public class IndexTool extends Configured implements Tool {
             final List<ColumnInfo> columnMetadataList =
                     PhoenixRuntime.generateColumnInfo(connection, qIndexTable, indexColumns);
             ColumnInfoToStringEncoderDecoder.encode(configuration, columnMetadataList);
-            FileSystem.get(configuration).delete(outputPath, true);
-            
+            fs = outputPath.getFileSystem(configuration);
+            fs.delete(outputPath, true);           
+ 
             final String jobName = String.format(INDEX_JOB_NAME_TEMPLATE, pdataTable.getName().toString(), indexTable);
             final Job job = Job.getInstance(configuration, jobName);
             job.setJarByClass(IndexTool.class);
@@ -475,10 +477,12 @@ public class IndexTool extends Configured implements Tool {
             
             PTable pdataTable = PhoenixRuntime.getTableNoCache(connection, qDataTable);
 			Path outputPath = null;
+			FileSystem fs = null;
 			if (basePath != null) {
 				outputPath = CsvBulkImportUtil.getOutputPath(new Path(basePath), pindexTable == null
 						? pdataTable.getPhysicalName().getString() : pindexTable.getPhysicalName().getString());
-				FileSystem.get(configuration).delete(outputPath, true);
+				fs = outputPath.getFileSystem(configuration);
+				fs.delete(outputPath, true);
 			}
             
             Job job = new JobFactory(connection, configuration, outputPath).getJob(schemaName, indexTable, dataTable,
@@ -502,7 +506,7 @@ public class IndexTool extends Configured implements Tool {
                     htable.close();
                     // Without direct API, we need to update the index state to ACTIVE from
client.
                     IndexToolUtil.updateIndexState(connection, qDataTable, indexTable, PIndexState.ACTIVE);
-                    FileSystem.get(configuration).delete(outputPath, true);
+                    fs.delete(outputPath, true);
                 }
                 return 0;
             } else {


Mime
View raw message