/*
 * Copyright [2013-2014] eBay Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ml.shifu.guagua.mapreduce.example.sum;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

/**
 * Entry point for the Guagua Sum job.
 *
 * <p>Please use {@code GuaguaMapReduceClient} to submit Guagua jobs instead of this class.
 */
public class SumMapReduceClient {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if(otherArgs.length < 3) {
            throw new IllegalArgumentException(
                    "SumMapReduceClient: Must have at least 3 arguments <guagua.iteration.count> <guagua.zk.servers> <input path or folder>.");
        }
        // NOTE: the configuration keys in the calls below were dropped when this snippet was
        // captured; the key names used here are assumptions reconstructed from the values and
        // are not taken from the original source.
        conf.setInt("guagua.zk.session.timeout", 300 * 1000);
        conf.setInt("guagua.zk.max.retry.attempts", 5);
        conf.setInt("guagua.zk.retry.wait.mills", 1000);
        // If the result class is set to a Hadoop Writable, GuaguaWritableSerializer must be used;
        // this can be avoided by using GuaguaMapReduceClient.
        conf.set("guagua.master.result.class", LongWritable.class.getName());
        conf.set("guagua.worker.result.class", LongWritable.class.getName());
        conf.setInt("mapred.task.timeout", 3600000);
        conf.setInt("io.sort.mb", 0);
        Job job = new Job(conf, "Guagua Sum Master-Workers Job");
        FileInputFormat.addInputPath(job, new Path(otherArgs[2]));
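        // ----- Hedged sketch, not part of the original snippet -------------------------------
        // The listing is truncated here. In a typical Guagua driver the remaining steps wire in
        // Guagua's map-only execution (the master and all workers run as map tasks coordinated
        // through ZooKeeper) and then submit the job. The GuaguaInputFormat/GuaguaMapper class
        // names below are assumptions about the guagua-mapreduce module, not taken from this page.
        job.setJarByClass(SumMapReduceClient.class);
        job.setInputFormatClass(ml.shifu.guagua.mapreduce.GuaguaInputFormat.class); // assumed FQCN
        job.setMapperClass(ml.shifu.guagua.mapreduce.GuaguaMapper.class);           // assumed FQCN
        job.setNumReduceTasks(0); // map-only job: no reduce phase is used
        job.setOutputFormatClass(org.apache.hadoop.mapreduce.lib.output.NullOutputFormat.class);
        job.waitForCompletion(true);
    }
}

// Illustrative invocation only; the argument order follows the error message above and the
// values are made-up examples:
//   SumMapReduceClient.main(new String[] { "100", "zk1:2181,zk2:2181", "hdfs:///tmp/sum/input" });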