/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
 
 
package org.apache.mahout.classifier.bayes.mapreduce.cbayes;

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DefaultStringifier;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.util.GenericsUtil;
import org.apache.mahout.classifier.bayes.common.BayesParameters;
import org.apache.mahout.classifier.bayes.io.SequenceFileModelReader;
import org.apache.mahout.classifier.bayes.mapreduce.common.BayesJob;
import org.apache.mahout.common.StringTuple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Create and run the Bayes Trainer.
 
 public class CBayesThetaNormalizerDriver implements BayesJob {
 
   private static final Logger log = LoggerFactory.getLogger(CBayesThetaNormalizerDriver.class);

  
Run the job

Parameters:
input the input pathname String
output the output pathname String
 
   @Override
   public void runJob(String inputString outputBayesParameters paramsthrows IOException {
     JobClient client = new JobClient();
     JobConf conf = new JobConf(CBayesThetaNormalizerDriver.class);
     conf.setJobName("Complementary Bayes Theta Normalizer Driver running over input: " +  input);
 
 
     conf.setOutputKeyClass(StringTuple.class);
     conf.setOutputValueClass(DoubleWritable.class);
     FileInputFormat.addInputPath(confnew Path(output + "/trainer-weights/Sigma_j"));
     FileInputFormat.addInputPath(confnew Path(output + "/trainer-tfIdf/trainer-tfIdf"));
     Path outPath = new Path(output + "/trainer-thetaNormalizer");
     FileOutputFormat.setOutputPath(confoutPath);
     //conf.setNumMapTasks(100);
     //conf.setNumReduceTasks(1);
     conf.setMapperClass(CBayesThetaNormalizerMapper.class);
     conf.setInputFormat(SequenceFileInputFormat.class);
     conf.setCombinerClass(CBayesThetaNormalizerReducer.class);
     conf.setReducerClass(CBayesThetaNormalizerReducer.class);
     conf.setOutputFormat(SequenceFileOutputFormat.class);
     conf.set("io.serializations",
         "org.apache.hadoop.io.serializer.JavaSerialization,org.apache.hadoop.io.serializer.WritableSerialization");
     // Dont ever forget this. People should keep track of how hadoop conf parameters and make or break a piece of code
 
     FileSystem dfs = FileSystem.get(outPath.toUri(), conf);
     if (dfs.exists(outPath)) {
       dfs.delete(outPathtrue);
     }
 
     Path Sigma_kFiles = new Path(output + "/trainer-weights/Sigma_k/*");
     Map<StringDoublelabelWeightSum = SequenceFileModelReader.readLabelSums(dfsSigma_kFilesconf);
     DefaultStringifier<Map<StringDouble>> mapStringifier =
         new DefaultStringifier<Map<StringDouble>>(conf, GenericsUtil.getClass(labelWeightSum));
     String labelWeightSumString = mapStringifier.toString(labelWeightSum);
 
     .info("Sigma_k for Each Label");
     Map<StringDoublec = mapStringifier.fromString(labelWeightSumString);
     .info("{}"c);
     conf.set("cnaivebayes.sigma_k"labelWeightSumString);
 
 
     Path sigma_kSigma_jFile = new Path(output + "/trainer-weights/Sigma_kSigma_j/*");
     double sigma_jSigma_k = SequenceFileModelReader.readSigma_jSigma_k(dfssigma_kSigma_jFileconf);
     DefaultStringifier<Doublestringifier = new DefaultStringifier<Double>(confDouble.class);
     String sigma_jSigma_kString = stringifier.toString(sigma_jSigma_k);
 
     .info("Sigma_kSigma_j for each Label and for each Features");
     double retSigma_jSigma_k = stringifier.fromString(sigma_jSigma_kString);
    .info("{}"retSigma_jSigma_k);
    conf.set("cnaivebayes.sigma_jSigma_k"sigma_jSigma_kString);
    Path vocabCountFile = new Path(output + "/trainer-tfIdf/trainer-vocabCount/*");
    double vocabCount = SequenceFileModelReader.readVocabCount(dfsvocabCountFileconf);
    String vocabCountString = stringifier.toString(vocabCount);
    
    .info("Vocabulary Count");
    conf.set("cnaivebayes.vocabCount"vocabCountString);
    double retvocabCount = stringifier.fromString(vocabCountString);
    .info("{}"retvocabCount);
    conf.set("bayes.parameters"params.toString());
    conf.set("output.table"output);
    client.setConf(conf);
    JobClient.runJob(conf);
  }
New to GrepCode? Check out our FAQ X