/**
 * Copyright 2013 Cloudera Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
 package org.kitesdk.maven.plugins;
import java.io.File;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import org.apache.maven.artifact.Artifact;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
Run a Hadoop tool on the local machine.
 @Mojo(name = "run-tool", requiresDependencyResolution = .)
 public class RunToolMojo extends AbstractHadoopMojo {

The tool class to run. The specified class must have a standard Java main method.
   @Parameter(property = "kite.toolClass", required = true)
   private String toolClass;

Arguments to pass to the tool, in addition to those generated by addDependenciesToDistributedCache and hadoopConfiguration.
   @Parameter(property = "kite.args")
   private String[] args;

Whether to add dependencies in the runtime classpath to Hadoop's distributed cache so that they are added to the classpath for MapReduce tasks (via -libjars).
   @Parameter(property = "kite.addDependenciesToDistributedCache",
       defaultValue = "true")
   private boolean addDependenciesToDistributedCache;

Hadoop configuration properties. WARNING: This configuration setting is not compatible with the factory methods in because it does not alter the environment configuration. For example, if using this to modify the environment's "fs.defaultFS" property in the tool that is run by this Mojo, opening a repo by URI will continue to use the environment's default FS. Configuration properties set using this option will only affect the org.apache.hadoop.conf.Configuration objects passed by org.apache.hadoop.util.ToolRunner or created by org.apache.hadoop.util.GenericOptionsParser.
   @Parameter(property = "kite.hadoopConfiguration")
   public void execute() throws MojoExecutionExceptionMojoFailureException {
     List<StringlibJars = new ArrayList<String>();
     List<URLclasspath = new ArrayList<URL>();
     File mainArtifactFile = new File(.getBuild().getDirectory(),
         .getBuild().getFinalName() + ".jar");
     if (!mainArtifactFile.exists()) {
       throw new MojoExecutionException("Main artifact missing: " + mainArtifactFile);
     for (Object a : .getRuntimeArtifacts()) {
       File file = ((Artifacta).getFile();
     final List<StringcommandArgs = new ArrayList<String>();
     for (String key : .stringPropertyNames()) {
      String value = .getProperty(key);
      commandArgs.add(key + "=" + value);
    if ( != null) {
      for (String arg : ) {
    getLog().debug("Running tool with args: " + commandArgs);
    getLog().debug("Running tool with classpath: " + classpath);
    Thread executionThread = new Thread() {
      public void run() {
        try {
          Method main = Thread.currentThread().getContextClassLoader().loadClass()
              .getMethod("main"new Class[]{ String[].class });
          main.invoke(nullnew Object[] { commandArgs.toArray(new String[commandArgs.size()]) });
        } catch (Exception e) {
              Thread.currentThread(), e);
    ClassLoader parentClassLoader = getClass().getClassLoader(); // use Maven's classloader, not the system one
    ClassLoader classLoader = new URLClassLoader(
        classpath.toArray(new URL[classpath.size()]), parentClassLoader);
    try {
    } catch (InterruptedException e) {
      getLog().warn("interrupted while joining against thread " + executionThreade);
  private URL toURL(File filethrows MojoExecutionException {
    try {
      return file.toURI().toURL();
    } catch (MalformedURLException e) {
      throw new MojoExecutionException("Can't convert file  to URL: " + filee);
New to GrepCode? Check out our FAQ X