hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From na...@apache.org
Subject svn commit: r1055760 [2/2] - in /hive/trunk: ./ metastore/if/ metastore/src/gen/thrift/gen-cpp/ metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ metastore/src/gen/thrift/gen-php/hive_metastore/ metastore/src/gen/thrift/gen-py...
Date Thu, 06 Jan 2011 06:43:46 GMT
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1055760&r1=1055759&r2=1055760&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Thu Jan
 6 06:43:44 2011
@@ -18,13 +18,10 @@
 
 package org.apache.hadoop.hive.ql.parse;
 
-import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_CREATEDATABASE;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASECOMMENT;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DROPDATABASE;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_IFEXISTS;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_IFNOTEXISTS;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_SHOWDATABASES;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_SWITCHDATABASE;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -64,6 +61,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
+import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc.AlterIndexTypes;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
@@ -159,7 +157,8 @@ public class DDLSemanticAnalyzer extends
   @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
 
-    if(ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_PARTITION) {
+    switch(ast.getToken().getType()) {
+    case HiveParser.TOK_ALTERTABLE_PARTITION: {
       TablePartition tblPart = new TablePartition((ASTNode)ast.getChild(0));
       String tableName = tblPart.tableName;
       HashMap<String, String> partSpec = tblPart.partSpec;
@@ -171,94 +170,158 @@ public class DDLSemanticAnalyzer extends
       } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_LOCATION) {
         analyzeAlterTableLocation(ast, tableName, partSpec);
       }
-    } else if (ast.getToken().getType() == HiveParser.TOK_DROPTABLE) {
+      break;
+    }
+    case HiveParser.TOK_DROPTABLE:
       analyzeDropTable(ast, false);
-    } else if (ast.getToken().getType() == HiveParser.TOK_CREATEINDEX) {
+      break;
+    case HiveParser.TOK_CREATEINDEX:
       analyzeCreateIndex(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_DROPINDEX) {
+      break;
+    case HiveParser.TOK_DROPINDEX:
       analyzeDropIndex(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_DESCTABLE) {
+      break;
+    case HiveParser.TOK_DESCTABLE:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeDescribeTable(ast);
-    } else if (ast.getToken().getType() == TOK_SHOWDATABASES) {
+      break;
+    case TOK_SHOWDATABASES:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowDatabases(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_SHOWTABLES) {
+      break;
+    case HiveParser.TOK_SHOWTABLES:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowTables(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_SHOW_TABLESTATUS) {
+      break;
+    case HiveParser.TOK_SHOW_TABLESTATUS:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowTableStatus(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_SHOWFUNCTIONS) {
+      break;
+    case HiveParser.TOK_SHOWFUNCTIONS:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowFunctions(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_SHOWLOCKS) {
+      break;
+    case HiveParser.TOK_SHOWLOCKS:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowLocks(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_DESCFUNCTION) {
+      break;
+    case HiveParser.TOK_DESCFUNCTION:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeDescFunction(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_DESCDATABASE) {
+      break;
+    case HiveParser.TOK_DESCDATABASE:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeDescDatabase(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_MSCK) {
+      break;
+    case HiveParser.TOK_MSCK:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeMetastoreCheck(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_DROPVIEW) {
+      break;
+    case HiveParser.TOK_DROPVIEW:
       analyzeDropTable(ast, true);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERVIEW_PROPERTIES) {
+      break;
+    case HiveParser.TOK_ALTERVIEW_PROPERTIES:
       analyzeAlterTableProps(ast, true);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAME) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_RENAME:
       analyzeAlterTableRename(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_TOUCH) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_TOUCH:
       analyzeAlterTableTouch(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ARCHIVE) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_ARCHIVE:
       analyzeAlterTableArchive(ast, false);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_UNARCHIVE) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
       analyzeAlterTableArchive(ast, true);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_ADDCOLS:
       analyzeAlterTableModifyCols(ast, AlterTableTypes.ADDCOLS);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
       analyzeAlterTableModifyCols(ast, AlterTableTypes.REPLACECOLS);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAMECOL) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_RENAMECOL:
       analyzeAlterTableRenameCol(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDPARTS) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_ADDPARTS:
       analyzeAlterTableAddParts(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_DROPPARTS:
       analyzeAlterTableDropParts(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_PROPERTIES) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_PROPERTIES:
       analyzeAlterTableProps(ast, false);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
       analyzeAlterTableSerdeProps(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERIALIZER) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_SERIALIZER:
       analyzeAlterTableSerde(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_CLUSTER_SORT) {
+      break;
+    case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
       analyzeAlterTableClusterSort(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERINDEX_REBUILD) {
+      break;
+    case HiveParser.TOK_ALTERINDEX_REBUILD:
       analyzeAlterIndexRebuild(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERINDEX_PROPERTIES) {
+      break;
+    case HiveParser.TOK_ALTERINDEX_PROPERTIES:
       analyzeAlterIndexProps(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_SHOWPARTITIONS) {
+      break;
+    case HiveParser.TOK_SHOWPARTITIONS:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowPartitions(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_SHOWINDEXES) {
+      break;
+    case HiveParser.TOK_SHOWINDEXES:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowIndexes(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_LOCKTABLE) {
+      break;
+    case HiveParser.TOK_LOCKTABLE:
       analyzeLockTable(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_UNLOCKTABLE) {
+      break;
+    case HiveParser.TOK_UNLOCKTABLE:
       analyzeUnlockTable(ast);
-    } else if (ast.getToken().getType() == TOK_CREATEDATABASE) {
+      break;
+    case HiveParser.TOK_CREATEDATABASE:
       analyzeCreateDatabase(ast);
-    } else if (ast.getToken().getType() == TOK_DROPDATABASE) {
+      break;
+    case HiveParser.TOK_DROPDATABASE:
       analyzeDropDatabase(ast);
-    } else if (ast.getToken().getType() == TOK_SWITCHDATABASE) {
+      break;
+    case HiveParser.TOK_SWITCHDATABASE:
       analyzeSwitchDatabase(ast);
-    } else {
+      break;
+    case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
+      analyzeAlterDatabase(ast);
+      break;
+    default:
       throw new SemanticException("Unsupported command.");
     }
   }
 
+  private void analyzeAlterDatabase(ASTNode ast) throws SemanticException {
+
+    String dbName = unescapeIdentifier(ast.getChild(0).getText());
+    Map<String, String> dbProps = null;
+
+    for (int i = 1; i < ast.getChildCount(); i++) {
+      ASTNode childNode = (ASTNode) ast.getChild(i);
+      switch (childNode.getToken().getType()) {
+      case HiveParser.TOK_DATABASEPROPERTIES:
+        dbProps = DDLSemanticAnalyzer.getProps((ASTNode) childNode.getChild(0));
+        break;
+      default:
+        throw new SemanticException("Unrecognized token in ALTER DATABASE statement");
+      }
+    }
+
+    // currently alter database command can only change properties
+    AlterDatabaseDesc alterDesc = new AlterDatabaseDesc(dbName, null, null, false);
+    alterDesc.setDatabaseProperties(dbProps);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc),
+        conf));
+
+  }
   private void analyzeCreateDatabase(ASTNode ast) throws SemanticException {
     String dbName = unescapeIdentifier(ast.getChild(0).getText());
     boolean ifNotExists = false;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=1055760&r1=1055759&r2=1055760&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Thu Jan  6 06:43:44 2011
@@ -195,6 +195,7 @@ TOK_INDEXCOMMENT;
 TOK_DESCDATABASE;
 TOK_DATABASEPROPERTIES;
 TOK_DBPROPLIST;
+TOK_ALTERDATABASE_PROPERTIES;
 }
 
 
@@ -448,6 +449,8 @@ alterStatement
             KW_VIEW! alterViewStatementSuffix
         |
             KW_INDEX! alterIndexStatementSuffix
+        |
+            KW_DATABASE! alterDatabaseStatementSuffix
         )
     ;
 
@@ -490,6 +493,19 @@ alterIndexStatementSuffix
     )
     ;
 
+alterDatabaseStatementSuffix
+@init { msgs.push("alter database statement"); }
+@after { msgs.pop(); }
+    : alterDatabaseSuffixProperties
+    ;
+    
+alterDatabaseSuffixProperties
+@init { msgs.push("alter database properties statement"); }
+@after { msgs.pop(); }
+    : name=Identifier KW_SET KW_DBPROPERTIES dbProperties
+    -> ^(TOK_ALTERDATABASE_PROPERTIES $name dbProperties)
+    ;
+
 alterStatementSuffixRename
 @init { msgs.push("rename statement"); }
 @after { msgs.pop(); }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1055760&r1=1055759&r2=1055760&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Thu
Jan  6 06:43:44 2011
@@ -38,10 +38,12 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_CREATEDATABASE, "CREATEDATABASE");
     commandType.put(HiveParser.TOK_DROPDATABASE, "DROPDATABASE");
     commandType.put(HiveParser.TOK_SWITCHDATABASE, "SWITCHDATABASE");
+    commandType.put(HiveParser.TOK_ALTERDATABASE_PROPERTIES, "ALTERDATABASE");
     commandType.put(HiveParser.TOK_CREATETABLE, "CREATETABLE");
     commandType.put(HiveParser.TOK_DROPTABLE, "DROPTABLE");
     commandType.put(HiveParser.TOK_DESCTABLE, "DESCTABLE");
     commandType.put(HiveParser.TOK_DESCFUNCTION, "DESCFUNCTION");
+    commandType.put(HiveParser.TOK_DESCDATABASE, "DESCDATABASE");
     commandType.put(HiveParser.TOK_MSCK, "MSCK");
     commandType.put(HiveParser.TOK_ALTERTABLE_ADDCOLS, "ALTERTABLE_ADDCOLS");
     commandType.put(HiveParser.TOK_ALTERTABLE_REPLACECOLS, "ALTERTABLE_REPLACECOLS");
@@ -134,6 +136,7 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_ALTERTABLE_UNARCHIVE:
       case HiveParser.TOK_LOCKTABLE:
       case HiveParser.TOK_UNLOCKTABLE:
+      case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
         return new DDLSemanticAnalyzer(conf);
       case HiveParser.TOK_ALTERTABLE_PARTITION:
         String commandType = null;

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java?rev=1055760&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterDatabaseDesc.java Thu Jan 
6 06:43:44 2011
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+import java.util.Map;
+
+/**
+ * AlterDatabaseDesc.
+ *
+ */
+@Explain(displayName = "Alter Database")
+public class AlterDatabaseDesc extends DDLDesc implements Serializable {
+
+  private static final long serialVersionUID = 1L;
+
+  String databaseName;
+  String locationUri;
+  String comment;
+  boolean ifNotExists;
+  Map<String, String> dbProperties;
+
+  /**
+   * For serialization only.
+   */
+  public AlterDatabaseDesc() {
+  }
+
+  public AlterDatabaseDesc(String databaseName, String comment,
+      String locationUri, boolean ifNotExists) {
+    super();
+    this.databaseName = databaseName;
+    this.comment = comment;
+    this.locationUri = locationUri;
+    this.ifNotExists = ifNotExists;
+    this.dbProperties = null;
+  }
+
+  public AlterDatabaseDesc(String databaseName, boolean ifNotExists) {
+    this(databaseName, null, null, ifNotExists);
+  }
+
+
+
+  @Explain(displayName="if not exists")
+  public boolean getIfNotExists() {
+    return ifNotExists;
+  }
+
+  public void setIfNotExists(boolean ifNotExists) {
+    this.ifNotExists = ifNotExists;
+  }
+
+  public Map<String, String> getDatabaseProperties() {
+    return dbProperties;
+  }
+
+  public void setDatabaseProperties(Map<String, String> dbProps) {
+    this.dbProperties = dbProps;
+  }
+
+  @Explain(displayName="name")
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public void setDatabaseName(String databaseName) {
+    this.databaseName = databaseName;
+  }
+
+  @Explain(displayName="comment")
+  public String getComment() {
+    return comment;
+  }
+
+  public void setComment(String comment) {
+    this.comment = comment;
+  }
+
+  @Explain(displayName="locationUri")
+  public String getLocationUri() {
+    return locationUri;
+  }
+
+  public void setLocationUri(String locationUri) {
+    this.locationUri = locationUri;
+  }
+}

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=1055760&r1=1055759&r2=1055760&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Thu Jan  6 06:43:44
2011
@@ -57,6 +57,7 @@ public class DDLWork implements Serializ
   private ShowTableStatusDesc showTblStatusDesc;
   private ShowIndexesDesc showIndexesDesc;
   private DescDatabaseDesc descDbDesc;
+  private AlterDatabaseDesc alterDbDesc;
 
   /**
    * ReadEntitites that are passed to the hooks.
@@ -103,6 +104,12 @@ public class DDLWork implements Serializ
     this.descDbDesc = descDatabaseDesc;
   }
 
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      AlterDatabaseDesc alterDbDesc) {
+    this(inputs, outputs);
+    this.alterDbDesc = alterDbDesc;
+  }
+
   public DescDatabaseDesc getDescDatabaseDesc() {
     return descDbDesc;
   }
@@ -740,4 +747,11 @@ public class DDLWork implements Serializ
     this.dropIdxDesc = dropIdxDesc;
   }
 
+  public void setAlterDatabaseDesc(AlterDatabaseDesc alterDbDesc) {
+    this.alterDbDesc = alterDbDesc;
+  }
+
+  public AlterDatabaseDesc getAlterDatabaseDesc() {
+    return this.alterDbDesc;
+  }
 }

Modified: hive/trunk/ql/src/test/queries/clientpositive/database_properties.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/database_properties.q?rev=1055760&r1=1055759&r2=1055760&view=diff
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/database_properties.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/database_properties.q Thu Jan  6 06:43:44
2011
@@ -1,3 +1,6 @@
+set datanucleus.cache.collections=false;
+set datanucleus.cache.collections.lazy=false;
+
 create database db1;
 
 show databases;
@@ -12,4 +15,12 @@ describe database db2;
 describe database extended db2;
 
 
+set datanucleus.cache.collections=false;
+set datanucleus.cache.collections.lazy=false;
+
+alter database db2 set dbproperties (
+  'new.property' = 'some new props',
+  'hive.warehouse.dir' = 'new/warehouse/dir');
+
+describe database extended db2;
 

Modified: hive/trunk/ql/src/test/results/clientpositive/database_properties.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/database_properties.q.out?rev=1055760&r1=1055759&r2=1055760&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/database_properties.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/database_properties.q.out Thu Jan  6 06:43:44
2011
@@ -19,12 +19,25 @@ POSTHOOK: query: create database db2 wit
   'mapred.scratch.dir' = 'hdfs://tmp.dfs.com:50029/tmp')
 POSTHOOK: type: CREATEDATABASE
 PREHOOK: query: describe database db2
-PREHOOK: type: null
+PREHOOK: type: DESCDATABASE
 POSTHOOK: query: describe database db2
-POSTHOOK: type: null
+POSTHOOK: type: DESCDATABASE
 db2		pfile:/data/users/nzhang/work/1/apache-hive/build/ql/test/data/warehouse/db2.db	
 PREHOOK: query: describe database extended db2
-PREHOOK: type: null
+PREHOOK: type: DESCDATABASE
 POSTHOOK: query: describe database extended db2
-POSTHOOK: type: null
+POSTHOOK: type: DESCDATABASE
 db2		pfile:/data/users/nzhang/work/1/apache-hive/build/ql/test/data/warehouse/db2.db	{mapred.jobtracker.url=http://my.jobtracker.com:53000,
mapred.scratch.dir=hdfs://tmp.dfs.com:50029/tmp, hive.warehouse.dir=/user/hive/warehouse}
+PREHOOK: query: alter database db2 set dbproperties (
+  'new.property' = 'some new props',
+  'hive.warehouse.dir' = 'new/warehouse/dir')
+PREHOOK: type: ALTERDATABASE
+POSTHOOK: query: alter database db2 set dbproperties (
+  'new.property' = 'some new props',
+  'hive.warehouse.dir' = 'new/warehouse/dir')
+POSTHOOK: type: ALTERDATABASE
+PREHOOK: query: describe database extended db2
+PREHOOK: type: DESCDATABASE
+POSTHOOK: query: describe database extended db2
+POSTHOOK: type: DESCDATABASE
+db2		pfile:/data/users/nzhang/work/1/apache-hive/build/ql/test/data/warehouse/db2.db	{mapred.jobtracker.url=http://my.jobtracker.com:53000,
new.property=some new props, mapred.scratch.dir=hdfs://tmp.dfs.com:50029/tmp, hive.warehouse.dir=new/warehouse/dir}



Mime
View raw message