/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
 
 
 package org.apache.hadoop.hive.ql.plan;
 
import java.io.File;
import java.io.Serializable;
import java.net.URI;
import java.util.Enumeration;
import java.util.Properties;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.mapred.InputFormat;

/**
 * PartitionDesc.
 */
@Explain(displayName = "Partition")
public class PartitionDesc implements Serializable, Cloneable {
  private static final long serialVersionUID = 2L;
  private TableDesc tableDesc;
  private java.util.LinkedHashMap<String, String> partSpec;
  private java.lang.Class<? extends org.apache.hadoop.hive.serde2.Deserializer> deserializerClass;
  private Class<? extends InputFormat> inputFileFormatClass;
  private Class<? extends HiveOutputFormat> outputFileFormatClass;
  private java.util.Properties properties;
  private String serdeClassName;

  private transient String baseFileName;
 
   public void setBaseFileName(String baseFileName) {
    this.baseFileName = baseFileName;
   }
 
   public PartitionDesc() {
   }
 
   public PartitionDesc(final TableDesc table,
      final java.util.LinkedHashMap<String, String> partSpec) {
    this(table, partSpec, null, null, null, null, null);
   }
 
   public PartitionDesc(final TableDesc table,
      final java.util.LinkedHashMap<String, String> partSpec,
      final Class<? extends Deserializer> serdeClass,
      final Class<? extends InputFormat> inputFileFormatClass,
      final Class<?> outputFormat, final java.util.Properties properties,
      final String serdeClassName) {
    this.tableDesc = table;
    this.partSpec = partSpec;
    deserializerClass = serdeClass;
    this.inputFileFormatClass = inputFileFormatClass;
    if (outputFormat != null) {
      outputFileFormatClass = HiveFileFormatUtils
          .getOutputFormatSubstitute(outputFormat);
    }
    this.properties = properties;
    if (properties != null) {
      this.serdeClassName = properties
          .getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);
    }
   }
 
  public PartitionDesc(final org.apache.hadoop.hive.ql.metadata.Partition part)
      throws HiveException {
    tableDesc = Utilities.getTableDesc(part.getTable());
    partSpec = part.getSpec();
    properties = part.getSchema();
    serdeClassName = properties
        .getProperty(org.apache.hadoop.hive.serde.Constants.SERIALIZATION_LIB);
   }
 
   @Explain(displayName = "")
   public TableDesc getTableDesc() {
    return tableDesc;
  }

  public void setTableDesc(TableDesc tableDesc) {
    this.tableDesc = tableDesc;
  }
  @Explain(displayName = "partition values")
  public java.util.LinkedHashMap<String, String> getPartSpec() {
    return partSpec;
  }

  public void setPartSpec(final java.util.LinkedHashMap<String, String> partSpec) {
    this.partSpec = partSpec;
  }
  public java.lang.Class<? extends org.apache.hadoop.hive.serde2.Deserializer> getDeserializerClass() {
    // Fall back to the table-level value when nothing has been set at the
    // partition level; the getters below use the same lazy pattern.
    if (deserializerClass == null && tableDesc != null) {
      setDeserializerClass(tableDesc.getDeserializerClass());
    }
    return deserializerClass;
  }

  public void setDeserializerClass(
      final java.lang.Class<? extends org.apache.hadoop.hive.serde2.Deserializer> serdeClass) {
    deserializerClass = serdeClass;
  }
  public Class<? extends InputFormat> getInputFileFormatClass() {
    if (inputFileFormatClass == null && tableDesc != null) {
      setInputFileFormatClass(tableDesc.getInputFileFormatClass());
    }
    return inputFileFormatClass;
  }

  
  /**
   * Return a deserializer object corresponding to the tableDesc.
   */
  public Deserializer getDeserializer() throws Exception {
    Deserializer de = getDeserializerClass().newInstance();
    de.initialize(null, getProperties());
    return de;
  }
  public void setInputFileFormatClass(
      final Class<? extends InputFormat> inputFileFormatClass) {
    this.inputFileFormatClass = inputFileFormatClass;
  }
  public Class<? extends HiveOutputFormat> getOutputFileFormatClass() {
    if (outputFileFormatClass == null && tableDesc != null) {
      setOutputFileFormatClass(tableDesc.getOutputFileFormatClass());
    }
    return outputFileFormatClass;
  }
  public void setOutputFileFormatClass(final Class<?> outputFileFormatClass) {
    this.outputFileFormatClass = HiveFileFormatUtils
        .getOutputFormatSubstitute(outputFileFormatClass);
  }
  @Explain(displayName = "properties", normalExplain = false)
  public java.util.Properties getProperties() {
    if (properties == null && tableDesc != null) {
      return tableDesc.getProperties();
    }
    return properties;
  }
  public void setProperties(final java.util.Properties properties) {
    this.properties = properties;
  }

  

  /**
   * @return the serdeClassName
   */
  @Explain(displayName = "serde")
  public String getSerdeClassName() {
    if (serdeClassName == null && tableDesc != null) {
      setSerdeClassName(tableDesc.getSerdeClassName());
    }
    return serdeClassName;
  }

  

  /**
   * @param serdeClassName the serde Class Name to set
   */
  public void setSerdeClassName(String serdeClassName) {
    this.serdeClassName = serdeClassName;
  }
  @Explain(displayName = "name")
  public String getTableName() {
    return getProperties().getProperty(
        org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_NAME);
  }
  @Explain(displayName = "input format")
  public String getInputFileFormatClassName() {
    return getInputFileFormatClass().getName();
  }
  @Explain(displayName = "output format")
  public String getOutputFileFormatClassName() {
    return getOutputFileFormatClass().getName();
  }
  @Explain(displayName = "base file name", normalExplain = false)
  public String getBaseFileName() {
    return baseFileName;
  }
  public PartitionDesc clone() {
    PartitionDesc ret = new PartitionDesc();
    ret.inputFileFormatClass = inputFileFormatClass;
    ret.outputFileFormatClass = outputFileFormatClass;
    if (properties != null) {
      Properties newProp = new Properties();
      Enumeration<Object> keysProp = properties.keys();
      while (keysProp.hasMoreElements()) {
        Object key = keysProp.nextElement();
        newProp.put(key, properties.get(key));
      }
      ret.setProperties(newProp);
    }
    ret.tableDesc = (TableDesc) tableDesc.clone();
    // The partition spec is not present
    if (partSpec != null) {
      ret.partSpec = new java.util.LinkedHashMap<String, String>();
      ret.partSpec.putAll(partSpec);
    }
    return ret;
  }

  
  /**
   * Attempt to derive a virtual base file name property from the path. If
   * path format is unrecognized, just use the full path.
   *
   * @param path URI to the partition file
   */
  void deriveBaseFileName(String path) {
    
    if (path == null) {
      return;
    }
    try {
      Path p = new Path(path);
      baseFileName = p.getName();
    } catch (Exception ex) {
      // don't really care about the exception. the goal is to capture
      // the last component at the minimum - so set to the complete path
      baseFileName = path;
    }
  }
}
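A short usage sketch, not part of the original file: it shows how a PartitionDesc built from a TableDesc and a partition spec falls back to table-level values, how deriveBaseFileName keeps only the last path component, and how clone() copies the properties and partition spec. The wrapper class, method name, path, and partition values below are illustrative assumptions.

// Hypothetical example (not part of PartitionDesc.java). It assumes it lives
// in org.apache.hadoop.hive.ql.plan, since deriveBaseFileName is
// package-private, and that a populated TableDesc is passed in.
class PartitionDescExample {
  static PartitionDesc describe(TableDesc tbl) {
    java.util.LinkedHashMap<String, String> spec =
        new java.util.LinkedHashMap<String, String>();
    spec.put("ds", "2009-01-01"); // partition column -> value

    PartitionDesc pd = new PartitionDesc(tbl, spec);

    // No partition-level formats were given, so the getters fall back to the
    // values carried by the TableDesc.
    Class<?> inputFormat = pd.getInputFileFormatClass();

    // Only the last path component ("ds=2009-01-01") is kept as the base
    // file name.
    pd.deriveBaseFileName("hdfs://nn:8020/user/hive/warehouse/t/ds=2009-01-01");

    // clone() copies the Properties and the partition spec, so the copy can
    // be changed without affecting the original descriptor.
    PartitionDesc copy = pd.clone();
    return copy;
  }
}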