/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
 
 
package org.apache.hadoop.hive.ql.plan;

import java.io.File;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map.Entry;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.BucketMatcher;
import org.apache.hadoop.hive.ql.exec.Operator;

/**
 * MapredLocalWork.
 */
 
 @Explain(displayName = "Map Reduce Local Work")
 public class MapredLocalWork implements Serializable {
   private static final long serialVersionUID = 1L;
 
   private LinkedHashMap<StringOperator<? extends Serializable>> aliasToWork;
   private boolean inputFileChangeSensitive;
   private String tmpFileURI;
   private String stageID;
 
   private List<Operator<? extends Serializable>> dummyParentOp ;
 
   public MapredLocalWork() {
 
   }
 
   public MapredLocalWork(
       final LinkedHashMap<StringOperator<? extends Serializable>> aliasToWork,
       final LinkedHashMap<StringFetchWorkaliasToFetchWork) {
     this. = aliasToWork;
     this. = aliasToFetchWork;
 
   }
 
   public MapredLocalWork(MapredLocalWork clone){
     this. = clone.tmpFileURI;
     this.=clone.inputFileChangeSensitive;
 
   }
 
 
   public void setDummyParentOp(List<Operator<? extends Serializable>> op){
     this.=op;
   }
 
 
   public List<Operator<? extends Serializable>> getDummyParentOp(){
     return this.;
   }
 
 
   @Explain(displayName = "Alias -> Map Local Operator Tree")
   public LinkedHashMap<StringOperator<? extends Serializable>> getAliasToWork() {
     return ;
   }
 
   public String getStageID() {
     return ;
   }
 
   public void setStageID(String stageID) {
     this. = stageID;
   }
 
   public void setAliasToWork(
       final LinkedHashMap<StringOperator<? extends Serializable>> aliasToWork) {
     this. = aliasToWork;
   }

  

Returns:
the aliasToFetchWork
 
  @Explain(displayName = "Alias -> Map Local Tables")
    return ;
  }

  

Parameters:
aliasToFetchWork the aliasToFetchWork to set
  public void setAliasToFetchWork(
      final LinkedHashMap<StringFetchWorkaliasToFetchWork) {
    this. = aliasToFetchWork;
  }
  public boolean getInputFileChangeSensitive() {
  }
  public void setInputFileChangeSensitive(boolean inputFileChangeSensitive) {
    this. = inputFileChangeSensitive;
  }
  public void deriveExplainAttributes() {
    if ( != null) {
    }
    for (FetchWork fetchWork : .values()) {
      if (fetchWork.getTblDesc() == null) {
        continue;
      }
        fetchWork.getTblDesc());
    }
  }
  @Explain(displayName = "Bucket Mapjoin Context", normalExplain = false)
    return ;
  }
  public void setBucketMapjoinContext(BucketMapJoinContext bucketMapjoinContext) {
    this. = bucketMapjoinContext;
  }
  public void setTmpFileURI(String tmpFileURI) {
    this. = tmpFileURI;
  }
  public String getTmpFileURI() {
    return ;
  }
  public static class BucketMapJoinContext implements Serializable {
    private static final long serialVersionUID = 1L;
    // used for bucket map join
    private String mapJoinBigTableAlias;
    private Class<? extends BucketMatcherbucketMatcherClass;
    public void setMapJoinBigTableAlias(String bigTableAlias) {
      this. = bigTableAlias;
    }
    public void deriveBucketMapJoinMapping() {
      if ( != null) {
        Iterator<Entry<StringLinkedHashMap<StringArrayList<String>>>> iter =
        while (iter.hasNext()) {
          Entry<StringLinkedHashMap<StringArrayList<String>>> old = iter.next();
          LinkedHashMap<StringArrayList<String>> newBucketBaseFileNameMapping = new LinkedHashMap<StringArrayList<String>>();
          Iterator<Entry<StringArrayList<String>>> oldAliasFileNameMappingIter = old.getValue().entrySet().iterator();
          while (oldAliasFileNameMappingIter.hasNext()) {
            //For a give table and its bucket full file path list, only keep the base file name (remove file path etc).
            //And put the new list into the new mapping.
            Entry<StringArrayList<String>> oldTableBucketFileNames =  oldAliasFileNameMappingIter.next();
            ArrayList<StringoldTableBucketNames = oldTableBucketFileNames.getValue();
            ArrayList<StringnewTableBucketFileBaseName = new ArrayList<String> (oldTableBucketNames.size());
            //for each bucket file, only keep its base files and store into a new list.
            if (oldTableBucketNames != null) {
              for (String bucketFName : oldTableBucketNames) {
                newTableBucketFileBaseName.add(getBaseFileName(bucketFName));
              }
            }
            String bigTblBucketFileName = getBaseFileName(oldTableBucketFileNames.getKey());
            if(newBucketBaseFileNameMapping.containsKey(bigTblBucketFileName)) {
              String fullPath = oldTableBucketFileNames.getKey();
              String dir = getBaseFileName(fullPath.substring(0, fullPath.lastIndexOf(bigTblBucketFileName)));
              bigTblBucketFileName = dir + . + bigTblBucketFileName;
            }
            //put the new mapping
            newBucketBaseFileNameMapping.put(bigTblBucketFileNamenewTableBucketFileBaseName);
          }
          String tableAlias = old.getKey();
          .put(tableAliasnewBucketBaseFileNameMapping);
        }
      }
    }
    private String getBaseFileName (String path) {
      try {
	return ((new Path(path)).getName());
      } catch (Exception ex) {
        // This could be due to either URI syntax error or File constructor
        // illegal arg; we don't really care which one it is.
        return path;
      }
    }
    public String getMapJoinBigTableAlias() {
      return ;
    }
    public Class<? extends BucketMatchergetBucketMatcherClass() {
      return ;
    }
    public void setBucketMatcherClass(
        Class<? extends BucketMatcherbucketMatcherClass) {
      this. = bucketMatcherClass;
    }
    @Explain(displayName = "Alias Bucket File Name Mapping", normalExplain = false)
      return ;
    }
    public void setAliasBucketFileNameMapping(
        LinkedHashMap<StringLinkedHashMap<StringArrayList<String>>> aliasBucketFileNameMapping) {
      this. = aliasBucketFileNameMapping;
    }
    @Override
    public String toString() {
      if ( != null) {
        return "Mapping:" + .toString();
      } else {
        return "";
      }
    }
    @Explain(displayName = "Alias Bucket Base File Name Mapping", normalExplain = false)
    }
        LinkedHashMap<StringLinkedHashMap<StringArrayList<String>>> aliasBucketBaseFileNameMapping) {
      this. = aliasBucketBaseFileNameMapping;
    }
    @Explain(displayName = "Alias Bucket Output File Name Mapping", normalExplain = false)
      return ;
    }
    public void setBucketFileNameMapping(LinkedHashMap<StringIntegerbucketFileNameMapping) {
      this. = bucketFileNameMapping;
    }
  }