/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
  package org.apache.hadoop.hive.metastore;
  
  import java.util.ArrayList;
  import java.util.HashMap;
  import java.util.HashSet;
  import java.util.LinkedHashMap;
  import java.util.List;
  import java.util.Map;
  import java.util.Set;

  import junit.framework.TestCase;

  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hive.conf.HiveConf;
  import org.apache.hadoop.hive.metastore.api.Database;
  import org.apache.hadoop.hive.metastore.api.FieldSchema;
  import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
  import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
  import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
  import org.apache.hadoop.hive.metastore.api.Order;
  import org.apache.hadoop.hive.metastore.api.Partition;
  import org.apache.hadoop.hive.metastore.api.SerDeInfo;
  import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
  import org.apache.hadoop.hive.metastore.api.Table;
  import org.apache.hadoop.hive.metastore.api.Type;
  import org.apache.hadoop.hive.serde.Constants;
  import org.apache.hadoop.util.StringUtils;
  import org.apache.thrift.TException;
  
  public abstract class TestHiveMetaStore extends TestCase {
    protected static HiveMetaStoreClient client;
    protected static HiveConf hiveConf;
    protected static Warehouse warehouse;
    protected static boolean isThriftClient = false;
  
    private static final String TEST_DB1_NAME = "testdb1";
    private static final String TEST_DB2_NAME = "testdb2";
  
    @Override
    protected void setUp() throws Exception {
       = new HiveConf(this.getClass());
       = new Warehouse();
  
      // set some values to use for getting conf. vars
      .set("hive.metastore.metrics.enabled","true");
      .set("hive.key1""value1");
      .set("hive.key2""http://www.example.com");
      .set("hive.key3""");
      .set("hive.key4""0");
    }
  
    public void testNameMethods() {
      Map<StringStringspec = new LinkedHashMap<StringString>();
      spec.put("ds""2008-07-01 14:13:12");
      spec.put("hr""14");
      List<Stringvals = new ArrayList<String>();
      for(String v : spec.values()) {
        vals.add(v);
      }
      String partName = "ds=2008-07-01 14%3A13%3A12/hr=14";
  
      try {
        List<StringtestVals = .partitionNameToVals(partName);
        assertTrue("Values from name are incorrect"vals.equals(testVals));
  
        Map<StringStringtestSpec = .partitionNameToSpec(partName);
        assertTrue("Spec from name is incorrect"spec.equals(testSpec));
  
        List<StringemptyVals = .partitionNameToVals("");
        assertTrue("Values should be empty"emptyVals.size() == 0);
  
        Map<StringStringemptySpec =  .partitionNameToSpec("");
        assertTrue("Spec should be empty"emptySpec.size() == 0);
      } catch (Exception e) {
        assert(false);
      }
    }

  
  /**
   * Tests create table and partition, and tries to drop the table without
   * dropping the partition.
   *
   * @throws Exception
   */
   public void testPartition() throws Exception {
   }
 
   public static void partitionTester(HiveMetaStoreClient clientHiveConf hiveConf)
     throws Exception {
     try {
       String dbName = "compdb";
       String tblName = "comptbl";
       String typeName = "Person";
       List<Stringvals = new ArrayList<String>(2);
       vals.add("2008-07-01 14:13:12");
       vals.add("14");
       List <Stringvals2 = new ArrayList<String>(2);
       vals2.add("2008-07-01 14:13:12");
       vals2.add("15");
       List <Stringvals3 = new ArrayList<String>(2);
       vals3 = new ArrayList<String>(2);
       vals3.add("2008-07-02 14:13:12");
       vals3.add("15");
       List <Stringvals4 = new ArrayList<String>(2);
       vals4 = new ArrayList<String>(2);
       vals4.add("2008-07-03 14:13:12");
       vals4.add("151");
 
       client.dropTable(dbNametblName);
       silentDropDatabase(dbName);
       Database db = new Database();
       db.setName(dbName);
       client.createDatabase(db);
 
       client.dropType(typeName);
       Type typ1 = new Type();
       typ1.setName(typeName);
       typ1.setFields(new ArrayList<FieldSchema>(2));
       typ1.getFields().add(
           new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
       typ1.getFields().add(
           new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
       client.createType(typ1);
 
       Table tbl = new Table();
       tbl.setDbName(dbName);
       tbl.setTableName(tblName);
       StorageDescriptor sd = new StorageDescriptor();
       tbl.setSd(sd);
       sd.setCols(typ1.getFields());
       sd.setCompressed(false);
       sd.setNumBuckets(1);
       sd.setParameters(new HashMap<StringString>());
       sd.getParameters().put("test_param_1""Use this for comments etc");
       sd.setBucketCols(new ArrayList<String>(2));
       sd.getBucketCols().add("name");
       sd.setSerdeInfo(new SerDeInfo());
       sd.getSerdeInfo().setName(tbl.getTableName());
       sd.getSerdeInfo().setParameters(new HashMap<StringString>());
       sd.getSerdeInfo().getParameters()
           .put(Constants.SERIALIZATION_FORMAT, "1");
       sd.setSortCols(new ArrayList<Order>());
 
       tbl.setPartitionKeys(new ArrayList<FieldSchema>(2));
       tbl.getPartitionKeys().add(
           new FieldSchema("ds", Constants.STRING_TYPE_NAME, ""));
       tbl.getPartitionKeys().add(
           new FieldSchema("hr", Constants.STRING_TYPE_NAME, ""));
 
       client.createTable(tbl);
 
       if () {
         // the createTable() above does not update the location in the 'tbl'
         // object when the client is a thrift client and the code below relies
         // on the location being present in the 'tbl' object - so get the table
         // from the metastore
         tbl = client.getTable(dbNametblName);
       }
 
       Partition part = new Partition();
       part.setDbName(dbName);
       part.setTableName(tblName);
       part.setValues(vals);
       part.setParameters(new HashMap<StringString>());
       part.setSd(tbl.getSd());
       part.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
       part.getSd().setLocation(tbl.getSd().getLocation() + "/part1");
 
       Partition part2 = new Partition();
       part2.setDbName(dbName);
       part2.setTableName(tblName);
       part2.setValues(vals2);
       part2.setParameters(new HashMap<StringString>());
       part2.setSd(tbl.getSd());
       part2.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
       part2.getSd().setLocation(tbl.getSd().getLocation() + "/part2");
 
       Partition part3 = new Partition();
       part3.setDbName(dbName);
       part3.setTableName(tblName);
       part3.setValues(vals3);
       part3.setParameters(new HashMap<StringString>());
       part3.setSd(tbl.getSd());
       part3.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
       part3.getSd().setLocation(tbl.getSd().getLocation() + "/part3");
 
       Partition part4 = new Partition();
       part4.setDbName(dbName);
       part4.setTableName(tblName);
       part4.setValues(vals4);
       part4.setParameters(new HashMap<StringString>());
       part4.setSd(tbl.getSd());
       part4.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
       part4.getSd().setLocation(tbl.getSd().getLocation() + "/part4");
 
       // check if the partition exists (it shouldn;t)
       boolean exceptionThrown = false;
       try {
         Partition p = client.getPartition(dbNametblNamevals);
       } catch(Exception e) {
         assertEquals("partition should not have existed",
             NoSuchObjectException.classe.getClass());
         exceptionThrown = true;
       }
       assertTrue("getPartition() should have thrown NoSuchObjectException"exceptionThrown);
       Partition retp = client.add_partition(part);
       assertNotNull("Unable to create partition " + partretp);
       Partition retp2 = client.add_partition(part2);
       assertNotNull("Unable to create partition " + part2retp2);
       Partition retp3 = client.add_partition(part3);
       assertNotNull("Unable to create partition " + part3retp3);
       Partition retp4 = client.add_partition(part4);
       assertNotNull("Unable to create partition " + part4retp4);
 
       Partition part_get = client.getPartition(dbNametblNamepart.getValues());
       if() {
         // since we are using thrift, 'part' will not have the create time and
         // last DDL time set since it does not get updated in the add_partition()
         // call - likewise part2 and part3 - set it correctly so that equals check
         // doesn't fail
         adjust(clientpartdbNametblName);
         adjust(clientpart2dbNametblName);
         adjust(clientpart3dbNametblName);
       }
       assertTrue("Partitions are not same"part.equals(part_get));
 
       String partName = "ds=2008-07-01 14%3A13%3A12/hr=14";
       String part2Name = "ds=2008-07-01 14%3A13%3A12/hr=15";
       String part3Name ="ds=2008-07-02 14%3A13%3A12/hr=15";
 
       part_get = client.getPartition(dbNametblNamepartName);
       assertTrue("Partitions are not the same"part.equals(part_get));
 
       // Test partition listing with a partial spec - ds is specified but hr is not
       List<StringpartialVals = new ArrayList<String>();
       partialVals.add(vals.get(0));
       Set<Partitionparts = new HashSet<Partition>();
       parts.add(part);
       parts.add(part2);
 
       List<Partitionpartial = client.listPartitions(dbNametblNamepartialVals,
           (short) -1);
       assertTrue("Should have returned 2 partitions"partial.size() == 2);
       assertTrue("Not all parts returned"partial.containsAll(parts));
 
       Set<StringpartNames = new HashSet<String>();
       partNames.add(partName);
       partNames.add(part2Name);
       List<StringpartialNames = client.listPartitionNames(dbNametblNamepartialVals,
           (short) -1);
       assertTrue("Should have returned 2 partition names"partialNames.size() == 2);
       assertTrue("Not all part names returned"partialNames.containsAll(partNames));
 
       // Test partition listing with a partial spec - hr is specified but ds is not
       parts.clear();
       parts.add(part2);
       parts.add(part3);
 
       partialVals.clear();
       partialVals.add("");
       partialVals.add(vals2.get(1));
 
       partial = client.listPartitions(dbNametblNamepartialVals, (short) -1);
       assertEquals("Should have returned 2 partitions", 2, partial.size());
       assertTrue("Not all parts returned"partial.containsAll(parts));
 
       partNames.clear();
       partNames.add(part2Name);
       partNames.add(part3Name);
       partialNames = client.listPartitionNames(dbNametblNamepartialVals,
           (short) -1);
       assertEquals("Should have returned 2 partition names", 2, partialNames.size());
       assertTrue("Not all part names returned"partialNames.containsAll(partNames));
 
       // Verify escaped partition names don't return partitions
       exceptionThrown = false;
       try {
         String badPartName = "ds=2008-07-01 14%3A13%3A12/hrs=14";
         client.getPartition(dbNametblNamebadPartName);
       } catch(NoSuchObjectException e) {
         exceptionThrown = true;
       }
       assertTrue("Bad partition spec should have thrown an exception"exceptionThrown);
 
       Path partPath = new Path(part2.getSd().getLocation());
       FileSystem fs = FileSystem.get(partPath.toUri(), hiveConf);
 
 
       assertTrue(fs.exists(partPath));
       client.dropPartition(dbNametblNamepart.getValues(), true);
       assertFalse(fs.exists(partPath));
 
       // Test append_partition_by_name
       client.appendPartition(dbNametblNamepartName);
       Partition part5 = client.getPartition(dbNametblNamepart.getValues());
       assertTrue("Append partition by name failed"part5.getValues().equals(vals));;
       Path part5Path = new Path(part5.getSd().getLocation());
       assertTrue(fs.exists(part5Path));
 
       // Test drop_partition_by_name
       assertTrue("Drop partition by name failed",
           client.dropPartition(dbNametblNamepartNametrue));
       assertFalse(fs.exists(part5Path));
 
       // add the partition again so that drop table with a partition can be
       // tested
       retp = client.add_partition(part);
       assertNotNull("Unable to create partition " + partretp);
 
       client.dropTable(dbNametblName);
 
       client.dropType(typeName);
 
       // recreate table as external, drop partition and it should
       // still exist
       tbl.setParameters(new HashMap<StringString>());
       tbl.getParameters().put("EXTERNAL""TRUE");
       client.createTable(tbl);
       retp = client.add_partition(part);
       assertTrue(fs.exists(partPath));
       client.dropPartition(dbNametblNamepart.getValues(), true);
       assertTrue(fs.exists(partPath));
 
       for (String tableName : client.getTables(dbName"*")) {
         client.dropTable(dbNametableName);
       }
 
       client.dropDatabase(dbName);
 
     } catch (Exception e) {
       ..println(StringUtils.stringifyException(e));
       ..println("testPartition() failed.");
       throw e;
     }
   }
 
   public void testAlterPartition() throws Throwable {
 
     try {
       String dbName = "compdb";
       String tblName = "comptbl";
       List<Stringvals = new ArrayList<String>(2);
       vals.add("2008-07-01");
       vals.add("14");
 
       .dropTable(dbNametblName);
       silentDropDatabase(dbName);
       Database db = new Database();
       db.setName(dbName);
       db.setDescription("Alter Partition Test database");
       .createDatabase(db);
 
       ArrayList<FieldSchemacols = new ArrayList<FieldSchema>(2);
       cols.add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
       cols.add(new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
 
       Table tbl = new Table();
       tbl.setDbName(dbName);
       tbl.setTableName(tblName);
       StorageDescriptor sd = new StorageDescriptor();
       tbl.setSd(sd);
       sd.setCols(cols);
       sd.setCompressed(false);
       sd.setNumBuckets(1);
       sd.setParameters(new HashMap<StringString>());
       sd.getParameters().put("test_param_1""Use this for comments etc");
       sd.setBucketCols(new ArrayList<String>(2));
       sd.getBucketCols().add("name");
       sd.setSerdeInfo(new SerDeInfo());
       sd.getSerdeInfo().setName(tbl.getTableName());
       sd.getSerdeInfo().setParameters(new HashMap<StringString>());
       sd.getSerdeInfo().getParameters()
           .put(Constants.SERIALIZATION_FORMAT, "1");
       sd.setSortCols(new ArrayList<Order>());
 
       tbl.setPartitionKeys(new ArrayList<FieldSchema>(2));
       tbl.getPartitionKeys().add(
           new FieldSchema("ds", Constants.STRING_TYPE_NAME, ""));
       tbl.getPartitionKeys().add(
           new FieldSchema("hr", Constants.INT_TYPE_NAME, ""));
 
       .createTable(tbl);
 
       if () {
         // the createTable() above does not update the location in the 'tbl'
         // object when the client is a thrift client and the code below relies
         // on the location being present in the 'tbl' object - so get the table
         // from the metastore
         tbl = .getTable(dbNametblName);
       }
 
       Partition part = new Partition();
       part.setDbName(dbName);
       part.setTableName(tblName);
       part.setValues(vals);
       part.setParameters(new HashMap<StringString>());
       part.setSd(tbl.getSd());
       part.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo());
       part.getSd().setLocation(tbl.getSd().getLocation() + "/part1");
 
       .add_partition(part);
 
       Partition part2 = .getPartition(dbNametblNamepart.getValues());
 
       part2.getParameters().put("retention""10");
       part2.getSd().setNumBuckets(12);
       part2.getSd().getSerdeInfo().getParameters().put("abc""1");
       .alter_partition(dbNametblNamepart2);
 
       Partition part3 = .getPartition(dbNametblNamepart.getValues());
       assertEquals("couldn't alter partition"part3.getParameters().get(
           "retention"), "10");
       assertEquals("couldn't alter partition"part3.getSd().getSerdeInfo()
           .getParameters().get("abc"), "1");
       assertEquals("couldn't alter partition"part3.getSd().getNumBuckets(),
           12);
 
       .dropTable(dbNametblName);
 
       .dropDatabase(dbName);
     } catch (Exception e) {
       ..println(StringUtils.stringifyException(e));
       ..println("testPartition() failed.");
       throw e;
     }
   }
 
   public void testDatabase() throws Throwable {
     try {
       // clear up any existing databases
 
       Database db = new Database();
       db.setName();
       .createDatabase(db);
 
       db = .getDatabase();
 
       assertEquals("name of returned db is different from that of inserted db",
           db.getName());
       assertEquals("location of the returned db is different from that of inserted db",
           .getDefaultDatabasePath().toString(), db.getLocationUri());
 
       Database db2 = new Database();
       db2.setName();
       .createDatabase(db2);
 
       db2 = .getDatabase();
 
       assertEquals("name of returned db is different from that of inserted db",
           db2.getName());
       assertEquals("location of the returned db is different from that of inserted db",
           .getDefaultDatabasePath().toString(), db2.getLocationUri());
 
       List<Stringdbs = .getDatabases(".*");
 
       assertTrue("first database is not " + dbs.contains());
       assertTrue("second database is not " + dbs.contains());
 
     } catch (Throwable e) {
       ..println(StringUtils.stringifyException(e));
       ..println("testDatabase() failed.");
       throw e;
     }
   }
 
   public void testSimpleTypeApi() throws Exception {
     try {
       .dropType(Constants.INT_TYPE_NAME);
 
       Type typ1 = new Type();
       typ1.setName(Constants.INT_TYPE_NAME);
       boolean ret = .createType(typ1);
       assertTrue("Unable to create type"ret);
 
       Type typ1_2 = .getType(Constants.INT_TYPE_NAME);
       assertNotNull(typ1_2);
       assertEquals(typ1.getName(), typ1_2.getName());
 
       ret = .dropType(Constants.INT_TYPE_NAME);
       assertTrue("unable to drop type integer"ret);
 
       boolean exceptionThrown = false;
       try {
         .getType(Constants.INT_TYPE_NAME);
       } catch (NoSuchObjectException e) {
         exceptionThrown = true;
       }
       assertTrue("Expected NoSuchObjectException"exceptionThrown);
     } catch (Exception e) {
       ..println(StringUtils.stringifyException(e));
       ..println("testSimpleTypeApi() failed.");
       throw e;
     }
   }
 
   // TODO:pc need to enhance this with complex fields and getType_all function
   public void testComplexTypeApi() throws Exception {
     try {
       .dropType("Person");
 
       Type typ1 = new Type();
       typ1.setName("Person");
       typ1.setFields(new ArrayList<FieldSchema>(2));
       typ1.getFields().add(
           new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
       typ1.getFields().add(
           new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
       boolean ret = .createType(typ1);
       assertTrue("Unable to create type"ret);
 
       Type typ1_2 = .getType("Person");
       assertNotNull("type Person not found"typ1_2);
       assertEquals(typ1.getName(), typ1_2.getName());
       assertEquals(typ1.getFields().size(), typ1_2.getFields().size());
       assertEquals(typ1.getFields().get(0), typ1_2.getFields().get(0));
       assertEquals(typ1.getFields().get(1), typ1_2.getFields().get(1));
 
       .dropType("Family");
 
       Type fam = new Type();
       fam.setName("Family");
       fam.setFields(new ArrayList<FieldSchema>(2));
       fam.getFields().add(
           new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
       fam.getFields().add(
           new FieldSchema("members",
               MetaStoreUtils.getListType(typ1.getName()), ""));
 
       ret = .createType(fam);
       assertTrue("Unable to create type " + fam.getName(), ret);
 
       Type fam2 = .getType("Family");
       assertNotNull("type Person not found"fam2);
       assertEquals(fam.getName(), fam2.getName());
       assertEquals(fam.getFields().size(), fam2.getFields().size());
       assertEquals(fam.getFields().get(0), fam2.getFields().get(0));
       assertEquals(fam.getFields().get(1), fam2.getFields().get(1));
 
       ret = .dropType("Family");
       assertTrue("unable to drop type Family"ret);
 
       ret = .dropType("Person");
       assertTrue("unable to drop type Person"ret);
 
       boolean exceptionThrown = false;
       try {
         .getType("Person");
       } catch (NoSuchObjectException e) {
         exceptionThrown = true;
       }
       assertTrue("Expected NoSuchObjectException"exceptionThrown);
     } catch (Exception e) {
       ..println(StringUtils.stringifyException(e));
       ..println("testComplexTypeApi() failed.");
       throw e;
     }
   }
 
   public void testSimpleTable() throws Exception {
     try {
       String dbName = "simpdb";
       String tblName = "simptbl";
       String tblName2 = "simptbl2";
       String typeName = "Person";
 
       .dropTable(dbNametblName);
       silentDropDatabase(dbName);
 
       Database db = new Database();
       db.setName(dbName);
       .createDatabase(db);
 
       .dropType(typeName);
       Type typ1 = new Type();
       typ1.setName(typeName);
       typ1.setFields(new ArrayList<FieldSchema>(2));
       typ1.getFields().add(
           new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
       typ1.getFields().add(
           new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
       .createType(typ1);
 
       Table tbl = new Table();
       tbl.setDbName(dbName);
       tbl.setTableName(tblName);
       StorageDescriptor sd = new StorageDescriptor();
       tbl.setSd(sd);
       sd.setCols(typ1.getFields());
       sd.setCompressed(false);
       sd.setNumBuckets(1);
       sd.setParameters(new HashMap<StringString>());
       sd.getParameters().put("test_param_1""Use this for comments etc");
       sd.setBucketCols(new ArrayList<String>(2));
       sd.getBucketCols().add("name");
       sd.setSerdeInfo(new SerDeInfo());
       sd.getSerdeInfo().setName(tbl.getTableName());
       sd.getSerdeInfo().setParameters(new HashMap<StringString>());
       sd.getSerdeInfo().getParameters().put(
           org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
           org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
       tbl.setPartitionKeys(new ArrayList<FieldSchema>());
 
       .createTable(tbl);
 
       if () {
         // the createTable() above does not update the location in the 'tbl'
         // object when the client is a thrift client and the code below relies
         // on the location being present in the 'tbl' object - so get the table
         // from the metastore
         tbl = .getTable(dbNametblName);
       }
 
       Table tbl2 = .getTable(dbNametblName);
       assertNotNull(tbl2);
       assertEquals(tbl2.getDbName(), dbName);
       assertEquals(tbl2.getTableName(), tblName);
       assertEquals(tbl2.getSd().getCols().size(), typ1.getFields().size());
       assertEquals(tbl2.getSd().isCompressed(), false);
       assertEquals(tbl2.getSd().getNumBuckets(), 1);
       assertEquals(tbl2.getSd().getLocation(), tbl.getSd().getLocation());
       assertNotNull(tbl2.getSd().getSerdeInfo());
       sd.getSerdeInfo().setParameters(new HashMap<StringString>());
       sd.getSerdeInfo().getParameters().put(
           org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
 
       tbl2.setTableName(tblName2);
       tbl2.setParameters(new HashMap<StringString>());
       tbl2.getParameters().put("EXTERNAL""TRUE");
       tbl2.getSd().setLocation(tbl.getSd().getLocation() + "-2");
 
       List<FieldSchemafieldSchemas = .getFields(dbNametblName);
       assertNotNull(fieldSchemas);
       assertEquals(fieldSchemas.size(), tbl.getSd().getCols().size());
       for (FieldSchema fs : tbl.getSd().getCols()) {
         assertTrue(fieldSchemas.contains(fs));
       }
 
       List<FieldSchemafieldSchemasFull = .getSchema(dbNametblName);
       assertNotNull(fieldSchemasFull);
       assertEquals(fieldSchemasFull.size(), tbl.getSd().getCols().size()
           + tbl.getPartitionKeys().size());
       for (FieldSchema fs : tbl.getSd().getCols()) {
         assertTrue(fieldSchemasFull.contains(fs));
       }
       for (FieldSchema fs : tbl.getPartitionKeys()) {
         assertTrue(fieldSchemasFull.contains(fs));
       }
 
       .createTable(tbl2);
       if () {
         tbl2 = .getTable(tbl2.getDbName(), tbl2.getTableName());
       }
 
       Table tbl3 = .getTable(dbNametblName2);
       assertNotNull(tbl3);
       assertEquals(tbl3.getDbName(), dbName);
       assertEquals(tbl3.getTableName(), tblName2);
       assertEquals(tbl3.getSd().getCols().size(), typ1.getFields().size());
       assertEquals(tbl3.getSd().isCompressed(), false);
       assertEquals(tbl3.getSd().getNumBuckets(), 1);
       assertEquals(tbl3.getSd().getLocation(), tbl2.getSd().getLocation());
       assertEquals(tbl3.getParameters(), tbl2.getParameters());
 
       fieldSchemas = .getFields(dbNametblName2);
       assertNotNull(fieldSchemas);
       assertEquals(fieldSchemas.size(), tbl2.getSd().getCols().size());
       for (FieldSchema fs : tbl2.getSd().getCols()) {
         assertTrue(fieldSchemas.contains(fs));
       }
 
       fieldSchemasFull = .getSchema(dbNametblName2);
       assertNotNull(fieldSchemasFull);
       assertEquals(fieldSchemasFull.size(), tbl2.getSd().getCols().size()
           + tbl2.getPartitionKeys().size());
       for (FieldSchema fs : tbl2.getSd().getCols()) {
         assertTrue(fieldSchemasFull.contains(fs));
       }
       for (FieldSchema fs : tbl2.getPartitionKeys()) {
         assertTrue(fieldSchemasFull.contains(fs));
       }
 
       assertEquals("Use this for comments etc"tbl2.getSd().getParameters()
           .get("test_param_1"));
       assertEquals("name"tbl2.getSd().getBucketCols().get(0));
       assertTrue("Partition key list is not empty",
           (tbl2.getPartitionKeys() == null)
               || (tbl2.getPartitionKeys().size() == 0));
 
       FileSystem fs = FileSystem.get((new Path(tbl.getSd().getLocation())).toUri(), );
       .dropTable(dbNametblName);
       assertFalse(fs.exists(new Path(tbl.getSd().getLocation())));
 
       .dropTable(dbNametblName2);
       assertTrue(fs.exists(new Path(tbl2.getSd().getLocation())));
 
       .dropType(typeName);
       .dropDatabase(dbName);
     } catch (Exception e) {
       ..println(StringUtils.stringifyException(e));
       ..println("testSimpleTable() failed.");
       throw e;
     }
   }
 
   public void testAlterTable() throws Exception {
     String dbName = "alterdb";
     String invTblName = "alter-tbl";
     String tblName = "altertbl";
 
     try {
       .dropTable(dbNametblName);
       silentDropDatabase(dbName);
 
       Database db = new Database();
       db.setName(dbName);
       .createDatabase(db);
 
       ArrayList<FieldSchemainvCols = new ArrayList<FieldSchema>(2);
       invCols.add(new FieldSchema("n-ame", Constants.STRING_TYPE_NAME, ""));
       invCols.add(new FieldSchema("in.come", Constants.INT_TYPE_NAME, ""));
 
       Table tbl = new Table();
       tbl.setDbName(dbName);
       tbl.setTableName(invTblName);
       StorageDescriptor sd = new StorageDescriptor();
       tbl.setSd(sd);
       sd.setCols(invCols);
       sd.setCompressed(false);
       sd.setNumBuckets(1);
       sd.setParameters(new HashMap<StringString>());
       sd.getParameters().put("test_param_1""Use this for comments etc");
       sd.setBucketCols(new ArrayList<String>(2));
       sd.getBucketCols().add("name");
       sd.setSerdeInfo(new SerDeInfo());
       sd.getSerdeInfo().setName(tbl.getTableName());
       sd.getSerdeInfo().setParameters(new HashMap<StringString>());
       sd.getSerdeInfo().getParameters().put(
           org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
       boolean failed = false;
       try {
         .createTable(tbl);
       } catch (InvalidObjectException ex) {
         failed = true;
       }
       if (!failed) {
         assertTrue("Able to create table with invalid name: " + invTblName,
             false);
       }
       ArrayList<FieldSchemacols = new ArrayList<FieldSchema>(2);
       cols.add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
       cols.add(new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
 
       // create a valid table
       tbl.setTableName(tblName);
       tbl.getSd().setCols(cols);
       .createTable(tbl);
 
       if () {
         tbl = .getTable(tbl.getDbName(), tbl.getTableName());
       }
 
       // now try to invalid alter table
       Table tbl2 = .getTable(dbNametblName);
       failed = false;
       try {
         tbl2.setTableName(invTblName);
         tbl2.getSd().setCols(invCols);
         .alter_table(dbNametblNametbl2);
       } catch (InvalidOperationException ex) {
         failed = true;
       }
       if (!failed) {
         assertTrue("Able to rename table with invalid name: " + invTblName,
             false);
       }
       // try a valid alter table
       tbl2.setTableName(tblName + "_renamed");
       tbl2.getSd().setCols(cols);
       tbl2.getSd().setNumBuckets(32);
       .alter_table(dbNametblNametbl2);
       Table tbl3 = .getTable(dbNametbl2.getTableName());
       assertEquals("Alter table didn't succeed. Num buckets is different ",
           tbl2.getSd().getNumBuckets(), tbl3.getSd().getNumBuckets());
       // check that data has moved
       FileSystem fs = FileSystem.get((new Path(tbl.getSd().getLocation())).toUri(), );
       assertFalse("old table location still exists"fs.exists(new Path(tbl
           .getSd().getLocation())));
       assertTrue("data did not move to new location"fs.exists(new Path(tbl3
           .getSd().getLocation())));
 
       if (!) {
         assertEquals("alter table didn't move data correct location"tbl3
             .getSd().getLocation(), tbl2.getSd().getLocation());
       }
     } catch (Exception e) {
       ..println(StringUtils.stringifyException(e));
       ..println("testSimpleTable() failed.");
       throw e;
     } finally {
       silentDropDatabase(dbName);
     }
   }
 
   public void testComplexTable() throws Exception {
 
     String dbName = "compdb";
     String tblName = "comptbl";
     String typeName = "Person";
 
     try {
       .dropTable(dbNametblName);
       silentDropDatabase(dbName);
       Database db = new Database();
       db.setName(dbName);
       .createDatabase(db);
 
       .dropType(typeName);
       Type typ1 = new Type();
       typ1.setName(typeName);
       typ1.setFields(new ArrayList<FieldSchema>(2));
       typ1.getFields().add(
           new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
       typ1.getFields().add(
           new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
       .createType(typ1);
 
       Table tbl = new Table();
       tbl.setDbName(dbName);
       tbl.setTableName(tblName);
       StorageDescriptor sd = new StorageDescriptor();
       tbl.setSd(sd);
       sd.setCols(typ1.getFields());
       sd.setCompressed(false);
       sd.setNumBuckets(1);
       sd.setParameters(new HashMap<StringString>());
       sd.getParameters().put("test_param_1""Use this for comments etc");
       sd.setBucketCols(new ArrayList<String>(2));
       sd.getBucketCols().add("name");
       sd.setSerdeInfo(new SerDeInfo());
       sd.getSerdeInfo().setName(tbl.getTableName());
       sd.getSerdeInfo().setParameters(new HashMap<StringString>());
       sd.getSerdeInfo().getParameters().put(
           org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "9");
           org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
 
       tbl.setPartitionKeys(new ArrayList<FieldSchema>(2));
       tbl.getPartitionKeys().add(
           new FieldSchema("ds",
               org.apache.hadoop.hive.serde.Constants.DATE_TYPE_NAME, ""));
       tbl.getPartitionKeys().add(
           new FieldSchema("hr",
               org.apache.hadoop.hive.serde.Constants.INT_TYPE_NAME, ""));
 
       .createTable(tbl);
 
       Table tbl2 = .getTable(dbNametblName);
       assertEquals(tbl2.getDbName(), dbName);
       assertEquals(tbl2.getTableName(), tblName);
       assertEquals(tbl2.getSd().getCols().size(), typ1.getFields().size());
       assertFalse(tbl2.getSd().isCompressed());
       assertEquals(tbl2.getSd().getNumBuckets(), 1);
 
       assertEquals("Use this for comments etc"tbl2.getSd().getParameters()
           .get("test_param_1"));
       assertEquals("name"tbl2.getSd().getBucketCols().get(0));
 
       assertNotNull(tbl2.getPartitionKeys());
       assertEquals(2, tbl2.getPartitionKeys().size());
       assertEquals(Constants.DATE_TYPE_NAME, tbl2.getPartitionKeys().get(0)
           .getType());
       assertEquals(Constants.INT_TYPE_NAME, tbl2.getPartitionKeys().get(1)
           .getType());
       assertEquals("ds"tbl2.getPartitionKeys().get(0).getName());
       assertEquals("hr"tbl2.getPartitionKeys().get(1).getName());