Start line:  
End line:  

Snippet Preview

Snippet HTML Code

Stack Overflow Questions
Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
  
  
  package org.apache.hadoop.hive.ql.parse;
  
  import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASECOMMENT;
  import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_IFEXISTS;
  import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_IFNOTEXISTS;
  import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_SHOWDATABASES;
  
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
  
  import  org.apache.hadoop.fs.Path;
  import  org.apache.hadoop.hive.metastore.Warehouse;
  import  org.apache.hadoop.hive.metastore.api.FieldSchema;
  import  org.apache.hadoop.hive.metastore.api.Index;
  import  org.apache.hadoop.hive.metastore.api.MetaException;
  import  org.apache.hadoop.hive.metastore.api.Order;
  import  org.apache.hadoop.hive.metastore.api.PrincipalType;
 import  org.apache.hadoop.mapred.TextInputFormat;

DDLSemanticAnalyzer.
 
 public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
   private static final Log LOG = LogFactory.getLog("hive.ql.parse.DDLSemanticAnalyzer");
   public static final Map<IntegerStringTokenToTypeName = new HashMap<IntegerString>();
 
   public static final Set<StringreservedPartitionValues = new HashSet<String>();
   static {
   }
 
   public static String getTypeName(int tokenthrows SemanticException {
     // date, datetime, and timestamp types aren't currently supported
     if (token == . || token == . ||
         token == .) {
     }
     return .get(token);
   }
 
   static class TablePartition {
     String tableName;
     HashMap<StringStringpartSpec = null;
 
     public TablePartition(){
     }
 
     public TablePartition (ASTNode tblPartthrows SemanticException {
        = unescapeIdentifier(tblPart.getChild(0).getText());
       if (tblPart.getChildCount() > 1) {
         ASTNode part = (ASTNodetblPart.getChild(1);
         if (part.getToken().getType() == .) {
          this. = DDLSemanticAnalyzer.getPartSpec(part);
         }
       }
     }
   }
 
  // Constructs the analyzer.
  // NOTE(review): the two comments below have no statements under them — the
  // calls that register reserved partition names/suffixes (presumably
  // reservedPartitionValues.add(...) reading values from HiveConf) appear to
  // have been lost when this file was extracted. Restore from the original.
  // Also note the mangled signature: "(HiveConf confthrows" should read
  // "(HiveConf conf) throws".
  public DDLSemanticAnalyzer(HiveConf confthrows SemanticException {
    super(conf);
    // Partition can't have this name
    // Partition value can't end in this suffix
  }
 
  // Entry point for DDL analysis: dispatches on the root AST token type to a
  // per-statement analyze* method. Result-producing statements (SHOW/DESC)
  // first point the session's result file at a local temp file.
  //
  // NOTE(review): this method was badly mangled by extraction: token
  // constants are reduced to a bare "." (or nothing), the receiver of
  // setResFile/getLocalTmpFileURI calls is missing (likely ctx), argument
  // separators are gone (e.g. "analyzeDropTable(astfalse)"), and many "case"
  // labels were dropped entirely, leaving orphaned analyze* calls. Restore
  // from the original source before attempting to compile. Code below is
  // kept byte-for-byte; only comments were added.
  public void analyzeInternal(ASTNode astthrows SemanticException {

    switch(ast.getToken().getType()) {
      // NOTE(review): missing "case" label — this braced arm handles the
      // ALTER TABLE ... PARTITION family, re-dispatching on the second child
      // (file format / protect mode / location).
      TablePartition tblPart = new TablePartition((ASTNode)ast.getChild(0));
      String tableName = tblPart.tableName;
      HashMap<StringStringpartSpec = tblPart.partSpec;
      ast = (ASTNode)ast.getChild(1);
      if (ast.getToken().getType() == .) {
        analyzeAlterTableFileFormat(asttableNamepartSpec);
      } else if (ast.getToken().getType() == .) {
        analyzeAlterTableProtectMode(asttableNamepartSpec);
      } else if (ast.getToken().getType() == .) {
        analyzeAlterTableLocation(asttableNamepartSpec);
      }
      break;
    }
    case .:
      analyzeDropTable(astfalse);
      break;
      // NOTE(review): missing "case" label before this and several of the
      // break-delimited groups below; each group is one statement kind.
      analyzeCreateIndex(ast);
      break;
    case .:
      analyzeDropIndex(ast);
      break;
    case .:
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeDescribeTable(ast);
      break;
    case :
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeShowDatabases(ast);
      break;
    case .:
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeShowTables(ast);
      break;
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeShowTableStatus(ast);
      break;
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeShowFunctions(ast);
      break;
    case .:
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeShowLocks(ast);
      break;
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeDescFunction(ast);
      break;
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeDescDatabase(ast);
      break;
    case .:
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeMetastoreCheck(ast);
      break;
    case .:
      // true => the dropped object is expected to be a view.
      analyzeDropTable(asttrue);
      break;
      analyzeAlterTableProps(asttrue);
      break;
      analyzeAlterTableRename(ast);
      break;
      analyzeAlterTableTouch(ast);
      break;
      analyzeAlterTableArchive(astfalse);
      break;
      analyzeAlterTableArchive(asttrue);
      break;
      // NOTE(review): two consecutive bare "break;" statements here — their
      // case labels and any bodies were lost in extraction.
      break;
      break;
      analyzeAlterTableRenameCol(ast);
      break;
      analyzeAlterTableAddParts(ast);
      break;
      analyzeAlterTableDropParts(ast);
      break;
      analyzeAlterTableProps(astfalse);
      break;
      analyzeAlterTableSerdeProps(ast);
      break;
      analyzeAlterTableSerde(ast);
      break;
      break;
      analyzeAlterIndexRebuild(ast);
      break;
      analyzeAlterIndexProps(ast);
      break;
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeShowPartitions(ast);
      break;
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeShowIndexes(ast);
      break;
    case .:
      analyzeLockTable(ast);
      break;
      analyzeUnlockTable(ast);
      break;
      analyzeCreateDatabase(ast);
      break;
      analyzeDropDatabase(ast);
      break;
      analyzeSwitchDatabase(ast);
      break;
      analyzeAlterDatabase(ast);
      break;
    case .:
      analyzeCreateRole(ast);
      break;
    case .:
      analyzeDropRole(ast);
      break;
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeShowRoleGrant(ast);
      break;
    case .:
      // true => GRANT ROLE; the arm below (label lost) is REVOKE ROLE.
      analyzeGrantRevokeRole(trueast);
      break;
      analyzeGrantRevokeRole(falseast);
      break;
    case .:
      analyzeGrant(ast);
      break;
    case .:
      .setResFile(new Path(.getLocalTmpFileURI()));
      analyzeShowGrant(ast);
      break;
    case .:
      analyzeRevoke(ast);
      break;
    default:
      // Any token without an arm above is not a DDL statement we handle.
      throw new SemanticException("Unsupported command.");
    }
  }
 
   private void analyzeGrantRevokeRole(boolean grantASTNode ast) {
     List<PrincipalDescprincipalDesc = analyzePrincipalListDef(
         (ASTNodeast.getChild(0));
     List<Stringroles = new ArrayList<String>();
     for (int i = 1; i < ast.getChildCount(); i++) {
       roles.add(unescapeIdentifier(ast.getChild(i).getText()));
     }
     String roleOwnerName = "";
     if (SessionState.get() != null
         && SessionState.get().getAuthenticator() != null) {
       roleOwnerName = SessionState.get().getAuthenticator().getUserName();
     }
     GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(grant,
         rolesprincipalDescroleOwnerName, PrincipalType.USER, true);
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         grantRevokeRoleDDL), ));
   }
 
   private void analyzeShowGrant(ASTNode astthrows SemanticException {
     PrivilegeObjectDesc privHiveObj = null;
 
     ASTNode principal = (ASTNodeast.getChild(0);
     PrincipalType type = PrincipalType.USER;
     switch (principal.getType()) {
     case .:
       type = PrincipalType.USER;
       break;
     case .:
       type = PrincipalType.GROUP;
       break;
     case .:
       type = PrincipalType.ROLE;
       break;
     }
     String principalName = unescapeIdentifier(principal.getChild(0).getText());
     PrincipalDesc principalDesc = new PrincipalDesc(principalNametype);
     List<Stringcols = null;
     if (ast.getChildCount() > 1) {
       ASTNode child = (ASTNodeast.getChild(1);
       if (child.getToken().getType() == .) {
         privHiveObj = new PrivilegeObjectDesc();
         privHiveObj.setObject(unescapeIdentifier(child.getChild(0).getText()));
         if (child.getChildCount() > 1) {
           for (int i = 1; i < child.getChildCount(); i++) {
             ASTNode grandChild = (ASTNodechild.getChild(i);
             if (grandChild.getToken().getType() == .) {
               privHiveObj.setPartSpec(DDLSemanticAnalyzer.getPartSpec(grandChild));
             } else if (grandChild.getToken().getType() == .) {
               cols = getColumnNames((ASTNodegrandChild);
             } else {
               privHiveObj.setTable(child.getChild(i) != null);
             }
           }
         }
       }
     }
 
     if (privHiveObj == null && cols != null) {
       throw new SemanticException(
           "For user-level privileges, column sets should be null. columns="
               + cols.toString());
     }
 
     ShowGrantDesc showGrant = new ShowGrantDesc(.getResFile().toString(),
         principalDescprivHiveObjcols);
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         showGrant), ));
   }
 
   private void analyzeGrant(ASTNode astthrows SemanticException {
     List<PrivilegeDescprivilegeDesc = analyzePrivilegeListDef(
         (ASTNodeast.getChild(0));
     List<PrincipalDescprincipalDesc = analyzePrincipalListDef(
         (ASTNodeast.getChild(1));
     boolean grantOption = false;
     PrivilegeObjectDesc privilegeObj = null;
 
     if (ast.getChildCount() > 2) {
       for (int i = 2; i < ast.getChildCount(); i++) {
         ASTNode astChild = (ASTNodeast.getChild(i);
         if (astChild.getType() == .) {
           grantOption = true;
         } else if (astChild.getType() == .) {
           privilegeObj = analyzePrivilegeObject(astChild);
         }
       }
     }
 
     String userName = null;
     if (SessionState.get() != null
         && SessionState.get().getAuthenticator() != null) {
       userName = SessionState.get().getAuthenticator().getUserName();
     }
 
     GrantDesc grantDesc = new GrantDesc(privilegeObjprivilegeDesc,
         principalDescuserName, PrincipalType.USER, grantOption);
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         grantDesc), ));
   }
 
   private void analyzeRevoke(ASTNode astthrows SemanticException {
     List<PrivilegeDescprivilegeDesc = analyzePrivilegeListDef(
         (ASTNodeast.getChild(0));
     List<PrincipalDescprincipalDesc = analyzePrincipalListDef(
         (ASTNodeast.getChild(1));
     PrivilegeObjectDesc hiveObj = null;
     if (ast.getChildCount() > 2) {
       ASTNode astChild = (ASTNodeast.getChild(2);
       hiveObj = analyzePrivilegeObject(astChild);
     }
 
     RevokeDesc revokeDesc = new RevokeDesc(privilegeDescprincipalDeschiveObj);
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         revokeDesc), ));
   }
 
 
       throws SemanticException {
     PrivilegeObjectDesc subject = new PrivilegeObjectDesc();
     subject.setObject(unescapeIdentifier(ast.getChild(0).getText()));
     if (ast.getChildCount() > 1) {
       for (int i =0;iast.getChildCount();i++) {
         ASTNode astChild = (ASTNodeast.getChild(i);
         if (astChild.getToken().getType() == .) {
           subject.setPartSpec(DDLSemanticAnalyzer.getPartSpec(astChild));
         } else {
           subject.setTable(ast.getChild(0) != null);
         }
       }
     }
     return subject;
   }
 
     List<PrincipalDescprincipalList = new ArrayList<PrincipalDesc>();
 
     for (int i = 0; i < node.getChildCount(); i++) {
       ASTNode child = (ASTNodenode.getChild(i);
       PrincipalType type = null;
       switch (child.getType()) {
       case .:
         type = PrincipalType.USER;
         break;
       case .:
         type = PrincipalType.GROUP;
         break;
       case .:
         type = PrincipalType.ROLE;
         break;
       }
       String principalName = unescapeIdentifier(child.getChild(0).getText());
       PrincipalDesc principalDesc = new PrincipalDesc(principalNametype);
       principalList.add(principalDesc);
     }
 
     return principalList;
   }
 
       throws SemanticException {
     List<PrivilegeDescret = new ArrayList<PrivilegeDesc>();
     for (int i = 0; i < node.getChildCount(); i++) {
       ASTNode privilegeDef = (ASTNodenode.getChild(i);
       ASTNode privilegeType = (ASTNodeprivilegeDef.getChild(0);
       Privilege privObj = PrivilegeRegistry.getPrivilege(privilegeType.getType());
 
       if (privObj == null) {
         throw new SemanticException("undefined privilege " + privObj.toString());
       }
       List<Stringcols = null;
       if (privilegeDef.getChildCount() > 1) {
         cols = getColumnNames((ASTNodeprivilegeDef.getChild(1));
       }
       PrivilegeDesc privilegeDesc = new PrivilegeDesc(privObjcols);
       ret.add(privilegeDesc);
     }
     return ret;
   }
 
   private void analyzeCreateRole(ASTNode ast) {
     String roleName = unescapeIdentifier(ast.getChild(0).getText());
     RoleDDLDesc createRoleDesc = new RoleDDLDesc(roleName,
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         createRoleDesc), ));
   }
 
   private void analyzeDropRole(ASTNode ast) {
     String roleName = unescapeIdentifier(ast.getChild(0).getText());
     RoleDDLDesc createRoleDesc = new RoleDDLDesc(roleName,
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         createRoleDesc), ));
   }
 
   private void analyzeShowRoleGrant(ASTNode ast) {
     ASTNode child = (ASTNodeast.getChild(0);
     PrincipalType principalType = PrincipalType.USER;
     switch (child.getType()) {
     case .:
       principalType = PrincipalType.USER;
       break;
     case .:
       principalType = PrincipalType.GROUP;
       break;
     case .:
       principalType = PrincipalType.ROLE;
       break;
     }
     String principalName = unescapeIdentifier(child.getChild(0).getText());
     RoleDDLDesc createRoleDesc = new RoleDDLDesc(principalNameprincipalType,
         ..null);
     createRoleDesc.setResFile(.getResFile().toString());
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         createRoleDesc), ));
   }
 
   private void analyzeAlterDatabase(ASTNode astthrows SemanticException {
 
     String dbName = unescapeIdentifier(ast.getChild(0).getText());
     Map<StringStringdbProps = null;
 
     for (int i = 1; i < ast.getChildCount(); i++) {
       ASTNode childNode = (ASTNodeast.getChild(i);
       switch (childNode.getToken().getType()) {
         dbProps = DDLSemanticAnalyzer.getProps((ASTNodechildNode.getChild(0));
         break;
       default:
         throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
       }
     }
 
     // currently alter database command can only change properties
     AlterDatabaseDesc alterDesc = new AlterDatabaseDesc(dbNamenullnullfalse);
     alterDesc.setDatabaseProperties(dbProps);
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc),
         ));
 
   }
 
   private void analyzeCreateDatabase(ASTNode astthrows SemanticException {
     String dbName = unescapeIdentifier(ast.getChild(0).getText());
     boolean ifNotExists = false;
     String dbComment = null;
     Map<StringStringdbProps = null;
 
     for (int i = 1; i < ast.getChildCount(); i++) {
       ASTNode childNode = (ASTNodeast.getChild(i);
       switch (childNode.getToken().getType()) {
       case :
         ifNotExists = true;
         break;
       case :
         dbComment = unescapeSQLString(childNode.getChild(0).getText());
         break;
         dbProps = DDLSemanticAnalyzer.getProps((ASTNodechildNode.getChild(0));
         break;
       default:
         throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
       }
     }
 
     CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc();
     createDatabaseDesc.setName(dbName);
     createDatabaseDesc.setComment(dbComment);
     createDatabaseDesc.setIfNotExists(ifNotExists);
     createDatabaseDesc.setLocationUri(null);
     if (dbProps != null) {
       createDatabaseDesc.setDatabaseProperties(dbProps);
     }
 
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         createDatabaseDesc), ));
   }
 
   private void analyzeDropDatabase(ASTNode astthrows SemanticException {
     String dbName = unescapeIdentifier(ast.getChild(0).getText());
     boolean ifExists = false;
 
     if (null != ast.getFirstChildWithType()) {
       ifExists = true;
     }
 
     DropDatabaseDesc dropDatabaseDesc = new DropDatabaseDesc(dbNameifExists);
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropDatabaseDesc), ));
   }
 
   private void analyzeSwitchDatabase(ASTNode ast) {
     String dbName = unescapeIdentifier(ast.getChild(0).getText());
     SwitchDatabaseDesc switchDatabaseDesc = new SwitchDatabaseDesc(dbName);
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         switchDatabaseDesc), ));
   }
 
 
 
   private void analyzeDropTable(ASTNode astboolean expectView)
       throws SemanticException {
     String tableName = getUnescapedName((ASTNode)ast.getChild(0));
     boolean ifExists = (ast.getFirstChildWithType() != null);
     // we want to signal an error if the table/view doesn't exist and we're
     // configured not to fail silently
     boolean throwException =
       !ifExists && !HiveConf.getBoolVar(.);
     try {
       Table tab = .getTable(.getCurrentDatabase(), tableNamethrowException);
       if (tab != null) {
         .add(new ReadEntity(tab));
         .add(new WriteEntity(tab));
       }
     } catch (HiveException e) {
       throw new SemanticException(..getMsg(tableName));
     }
 
     DropTableDesc dropTblDesc = new DropTableDesc(tableNameexpectView);
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         dropTblDesc), ));
   }
 
   private void analyzeCreateIndex(ASTNode astthrows SemanticException {
     String indexName = unescapeIdentifier(ast.getChild(0).getText());
     String typeName = unescapeSQLString(ast.getChild(1).getText());
     String tableName = getUnescapedName((ASTNode)ast.getChild(2));
     List<StringindexedCols = getColumnNames((ASTNodeast.getChild(3));
 
     IndexType indexType = HiveIndex.getIndexType(typeName);
     if (indexType != null) {
       typeName = indexType.getHandlerClsName();
     } else {
       try {
         Class.forName(typeName);
       } catch (Exception e) {
         throw new SemanticException("class name provided for index handler not found."e);
       }
     }
 
     String indexTableName = null;
     boolean deferredRebuild = false;
     String location = null;
     Map<StringStringtblProps = null;
     Map<StringStringidxProps = null;
     String indexComment = null;
 
     RowFormatParams rowFormatParams = new RowFormatParams();
     StorageFormat storageFormat = new StorageFormat();
 
     for (int idx = 4; idx < ast.getChildCount(); idx++) {
       ASTNode child = (ASTNodeast.getChild(idx);
       if (storageFormat.fillStorageFormat(childshared)) {
         continue;
       }
       switch (child.getToken().getType()) {
       case .:
         rowFormatParams.analyzeRowFormat(sharedchild);
         break;
         ASTNode ch = (ASTNodechild.getChild(0);
         indexTableName = getUnescapedName((ASTNode)ch);
         break;
         deferredRebuild = true;
         break;
       case .:
         location = unescapeSQLString(child.getChild(0).getText());
         break;
       case .:
         tblProps = DDLSemanticAnalyzer.getProps((ASTNodechild.getChild(0));
         break;
       case .:
         idxProps = DDLSemanticAnalyzer.getProps((ASTNodechild.getChild(0));
         break;
       case .:
         child = (ASTNodechild.getChild(0);
         shared.serde = unescapeSQLString(child.getChild(0).getText());
         if (child.getChildCount() == 2) {
           readProps((ASTNode) (child.getChild(1).getChild(0)),
               shared.serdeProps);
         }
         break;
       case .:
         child = (ASTNodechild.getChild(0);
         indexComment = unescapeSQLString(child.getText());
       }
     }
 
     storageFormat.fillDefaultStorageFormat(shared);
 
     CreateIndexDesc crtIndexDesc = new CreateIndexDesc(tableNameindexName,
         indexedColsindexTableNamedeferredRebuildstorageFormat.inputFormatstorageFormat.outputFormat,
         storageFormat.storageHandlertypeNamelocationidxPropstblProps,
         shared.serdeshared.serdePropsrowFormatParams.collItemDelim,
         rowFormatParams.fieldDelimrowFormatParams.fieldEscape,
         rowFormatParams.lineDelimrowFormatParams.mapKeyDelimindexComment);
     Task<?> createIndex = TaskFactory.get(new DDLWork(crtIndexDesc), );
     .add(createIndex);
   }
 
   private void analyzeDropIndex(ASTNode astthrows SemanticException {
     String indexName = unescapeIdentifier(ast.getChild(0).getText());
     String tableName = getUnescapedName((ASTNode)ast.getChild(1));
     boolean ifExists = (ast.getFirstChildWithType() != null);
     // we want to signal an error if the index doesn't exist and we're
     // configured not to ignore this
     boolean throwException =
       !ifExists && !HiveConf.getBoolVar(.);
     if (throwException) {
       try {
         Index idx = .getIndex(tableNameindexName);
       } catch (HiveException e) {
         throw new SemanticException(..getMsg(indexName));
       }
     }
 
     DropIndexDesc dropIdxDesc = new DropIndexDesc(indexNametableName);
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         dropIdxDesc), ));
   }
 
   private void analyzeAlterIndexRebuild(ASTNode astthrows SemanticException {
     String baseTableName = unescapeIdentifier(ast.getChild(0).getText());
     String indexName = unescapeIdentifier(ast.getChild(1).getText());
     HashMap<StringStringpartSpec = null;
     Tree part = ast.getChild(2);
     if (part != null) {
       partSpec = extractPartitionSpecs(part);
     }
     List<Task<?>> indexBuilder = getIndexBuilderMapRed(baseTableNameindexNamepartSpec);
     .addAll(indexBuilder);
   }
 
   private void analyzeAlterIndexProps(ASTNode ast)
     throws SemanticException {
 
     String baseTableName = getUnescapedName((ASTNode)ast.getChild(0));
     String indexName = unescapeIdentifier(ast.getChild(1).getText());
     HashMap<StringStringmapProp = getProps((ASTNode) (ast.getChild(2))
         .getChild(0));
 
     AlterIndexDesc alterIdxDesc =
     alterIdxDesc.setProps(mapProp);
     alterIdxDesc.setIndexName(indexName);
     alterIdxDesc.setBaseTableName(baseTableName);
     alterIdxDesc.setDbName(.getCurrentDatabase());
 
     .add(TaskFactory.get(new DDLWork(alterIdxDesc), ));
   }
 
   private List<Task<?>> getIndexBuilderMapRed(String baseTableNameString indexName,
       HashMap<StringStringpartSpecthrows SemanticException {
     try {
       String dbName = .getCurrentDatabase();
       Index index = .getIndex(dbNamebaseTableNameindexName);
       Table indexTbl = .getTable(dbNameindex.getIndexTableName());
       String baseTblName = index.getOrigTableName();
       Table baseTbl = .getTable(dbNamebaseTblName);
 
       String handlerCls = index.getIndexHandlerClass();
       HiveIndexHandler handler = HiveUtils.getIndexHandler(handlerCls);
 
       List<PartitionindexTblPartitions = null;
       List<PartitionbaseTblPartitions = null;
       if(indexTbl != null) {
         indexTblPartitions = new ArrayList<Partition>();
         baseTblPartitions = preparePartitions(baseTblpartSpec,
             indexTblindexTblPartitions);
       }
 
       List<Task<?>> ret = handler.generateIndexBuildTaskList(baseTbl,
           indexindexTblPartitionsbaseTblPartitionsindexTblgetInputs(), getOutputs());
       return ret;
     } catch (Exception e) {
       throw new SemanticException(e);
     }
   }
 
       org.apache.hadoop.hive.ql.metadata.Table baseTbl,
       HashMap<StringStringpartSpec,
       org.apache.hadoop.hive.ql.metadata.Table indexTblHive db,
       List<PartitionindexTblPartitions)
       throws HiveException, MetaException {
     List<PartitionbaseTblPartitions = new ArrayList<Partition>();
     if (partSpec != null) {
       // if partspec is specified, then only producing index for that
       // partition
       Partition part = db.getPartition(baseTblpartSpecfalse);
       if (part == null) {
         throw new HiveException("Partition "
             + Warehouse.makePartName(partSpecfalse)
             + " does not exist in table "
             + baseTbl.getTableName());
       }
       baseTblPartitions.add(part);
       Partition indexPart = db.getPartition(indexTblpartSpecfalse);
       if (indexPart == null) {
         indexPart = db.createPartition(indexTblpartSpec);
       }
       indexTblPartitions.add(indexPart);
     } else if (baseTbl.isPartitioned()) {
       // if no partition is specified, create indexes for all partitions one
       // by one.
       baseTblPartitions = db.getPartitions(baseTbl);
       for (Partition basePart : baseTblPartitions) {
         HashMap<StringStringpSpec = basePart.getSpec();
         Partition indexPart = db.getPartition(indexTblpSpecfalse);
         if (indexPart == null) {
           indexPart = db.createPartition(indexTblpSpec);
         }
         indexTblPartitions.add(indexPart);
       }
     }
     return baseTblPartitions;
   }
 
   private void analyzeAlterTableProps(ASTNode astboolean expectView)
     throws SemanticException {
 
     String tableName = getUnescapedName((ASTNode)ast.getChild(0));
     HashMap<StringStringmapProp = getProps((ASTNode) (ast.getChild(1))
         .getChild(0));
     AlterTableDesc alterTblDesc =
       new AlterTableDesc(.expectView);
     alterTblDesc.setProps(mapProp);
     alterTblDesc.setOldName(tableName);
 
     try {
       Table tab = .getTable(.getCurrentDatabase(), tableNamefalse);
       if (tab != null) {
         .add(new ReadEntity(tab));
         .add(new WriteEntity(tab));
       }
     } catch (HiveException e) {
       throw new SemanticException(..getMsg(tableName));
     }
 
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), ));
   }
 
   private void analyzeAlterTableSerdeProps(ASTNode ast)
       throws SemanticException {
     String tableName = getUnescapedName((ASTNode)ast.getChild(0));
     HashMap<StringStringmapProp = getProps((ASTNode) (ast.getChild(1))
         .getChild(0));
     AlterTableDesc alterTblDesc = new AlterTableDesc(
         .);
     alterTblDesc.setProps(mapProp);
     alterTblDesc.setOldName(tableName);
 
     try {
       Table tab = .getTable(.getCurrentDatabase(), tableNamefalse);
       if (tab != null) {
         .add(new ReadEntity(tab));
         .add(new WriteEntity(tab));
       }
     } catch (HiveException e) {
       throw new SemanticException(..getMsg(tableName));
     }
 
     .add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
         alterTblDesc), ));