/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.history;
 
 
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.util.LinkedList;
import java.util.Map;

import junit.framework.TestCase;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.cli.CliSessionState;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.MetaStoreUtils;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.QTestUtil.QTestSetup;
import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
import org.apache.hadoop.hive.ql.history.HiveHistory.QueryInfo;
import org.apache.hadoop.hive.ql.history.HiveHistory.TaskInfo;
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.tools.LineageInfo;
import org.apache.hadoop.mapred.TextInputFormat;

/**
 * TestHiveHistory.
 */
 
public class TestHiveHistory extends TestCase {

  static HiveConf conf;

  private static String tmpdir = "/tmp/" + System.getProperty("user.name")
      + "/";
  private static Path tmppath = new Path(tmpdir);
  private static Hive db;
  private static FileSystem fs;
  private QTestSetup setup;

  /*
   * initialize the tables
   */
 
  @Override
  protected void setUp() {
    try {
      conf = new HiveConf(HiveHistory.class);

      fs = FileSystem.get(conf);
      if (fs.exists(tmppath) && !fs.getFileStatus(tmppath).isDir()) {
        throw new RuntimeException(tmpdir + " exists but is not a directory");
      }

      if (!fs.exists(tmppath)) {
        if (!fs.mkdirs(tmppath)) {
          throw new RuntimeException("Could not make scratch directory "
              + tmpdir);
        }
      }

      setup = new QTestSetup();
      setup.preTest(conf);

      // copy the test files into hadoop if required.
      int i = 0;
      Path[] hadoopDataFile = new Path[2];
      String[] testFiles = {"kv1.txt", "kv2.txt"};
      String testFileDir = "file://"
          + conf.get("test.data.files").replace('\\', '/').replace("c:", "");
      for (String oneFile : testFiles) {
        Path localDataFile = new Path(testFileDir, oneFile);
        hadoopDataFile[i] = new Path(tmpdir, oneFile);
        fs.copyFromLocalFile(false, true, localDataFile, hadoopDataFile[i]);
        i++;
      }

      // load the test files into tables
      i = 0;
      db = Hive.get(conf);
      String[] srctables = {"src", "src2"};
      LinkedList<String> cols = new LinkedList<String>();
      cols.add("key");
      cols.add("value");
      for (String src : srctables) {
        db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
        db.createTable(src, cols, null, TextInputFormat.class,
            IgnoreKeyTextOutputFormat.class);
        db.loadTable(hadoopDataFile[i], src, false, false);
        i++;
      }
    } catch (Throwable e) {
      e.printStackTrace();
      throw new RuntimeException("Encountered throwable");
    }
  }
  @Override
  protected void tearDown() {
    try {
      setup.tearDown();
    } catch (Exception e) {
      System.out.println("Exception: " + e.getMessage());
      e.printStackTrace();
      System.out.flush();
      fail("Unexpected exception in tearDown");
    }
  }

  
  /**
   * Check history file output for this query.
   */
  public void testSimpleQuery() {
    new LineageInfo();
    try {
      // NOTE: It is critical to do this here so that log4j is reinitialized
      // before any of the other core hive classes are loaded
      SessionState.initHiveLog4j();
      CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
      ss.in = System.in;
      try {
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.err = new PrintStream(System.err, true, "UTF-8");
      } catch (UnsupportedEncodingException e) {
        System.exit(3);
      }

      SessionState.start(ss);

      String cmd = "select a.key from src a";
      Driver d = new Driver(conf);
      int ret = d.run(cmd).getResponseCode();
      if (ret != 0) {
        fail("Failed");
      }
      HiveHistoryViewer hv = new HiveHistoryViewer(SessionState.get()
          .getHiveHistory().getHistFileName());
      Map<String, QueryInfo> jobInfoMap = hv.getJobInfoMap();
      Map<String, TaskInfo> taskInfoMap = hv.getTaskInfoMap();
      if (jobInfoMap.size() != 1) {
        fail("jobInfo Map size not 1");
      }

      if (taskInfoMap.size() != 1) {
        fail("taskInfo Map size not 1");
      }

      cmd = (String) jobInfoMap.keySet().toArray()[0];
      QueryInfo ji = jobInfoMap.get(cmd);

      if (!ji.hm.get(Keys.QUERY_NUM_TASKS.name()).equals("1")) {
        fail("Wrong number of tasks");
      }
    } catch (Exception e) {
      e.printStackTrace();
      fail("Failed");
    }
  }
}
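
For context, here is a minimal standalone sketch (not part of the Hive source) of the same HiveHistoryViewer usage the test exercises: it parses an existing Hive history file (typically named hive_job_log_*.txt in the configured history directory) and prints the recorded task count per query. The HistoryDump class name and the command-line argument handling are illustrative assumptions; the viewer calls themselves mirror testSimpleQuery above.

  // HistoryDump is a hypothetical helper, not part of the Hive source tree.
  // It assumes args[0] points at an existing Hive history file.
  import java.util.Map;

  import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
  import org.apache.hadoop.hive.ql.history.HiveHistory.QueryInfo;
  import org.apache.hadoop.hive.ql.history.HiveHistoryViewer;

  public class HistoryDump {
    public static void main(String[] args) {
      // Parse the history file into per-query info, exactly as
      // testSimpleQuery does after running its query.
      HiveHistoryViewer hv = new HiveHistoryViewer(args[0]);
      for (Map.Entry<String, QueryInfo> e : hv.getJobInfoMap().entrySet()) {
        QueryInfo qi = e.getValue();
        // QUERY_NUM_TASKS is the same key the test asserts equals "1".
        System.out.println(e.getKey() + " -> tasks="
            + qi.hm.get(Keys.QUERY_NUM_TASKS.name()));
      }
    }
  }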