
Why is job chaining not working in MapReduce?

I have created two jobs and I want to chain them, so that the second job runs right after the first job completes. I wrote the following code, but from what I have observed, job1 finishes correctly while job2 never seems to execute.

public class Simpletask extends Configured implements Tool {

    public static enum FileCounters {
        COUNT;
    }

    public static class TokenizerMapper extends Mapper<Object, Text, IntWritable, Text> {

        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            StringTokenizer itr = new StringTokenizer(value.toString());
            while (itr.hasMoreTokens()) {
                String line = itr.nextToken();
                String[] part = line.split(",");
                int id = Integer.valueOf(part[0]);
                int x1 = Integer.valueOf(part[1]);
                int y1 = Integer.valueOf(part[2]);
                int z1 = Integer.valueOf(part[3]);
                int x2 = Integer.valueOf(part[4]);
                int y2 = Integer.valueOf(part[5]);
                int z2 = Integer.valueOf(part[6]);
                int h_v = Hilbert(x1, y1, z1);
                int parti = h_v / 10;
                IntWritable partition = new IntWritable(parti);
                Text neuron = new Text();
                neuron.set(line);
                context.write(partition, neuron);
            }
        }

        public int Hilbert(int x, int y, int z) {
            return (int) (Math.random() * 20);
        }
    }

    public static class IntSumReducer extends Reducer<IntWritable, Text, IntWritable, Text> {

        private Text result = new Text();
        private MultipleOutputs<IntWritable, Text> mos;

        public void setup(Context context) {
            mos = new MultipleOutputs<IntWritable, Text>(context);
        }

        <K, V> String generateFileName(K k) {
            return "p" + k.toString();
        }

        public void reduce(IntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            String accu = "";
            for (Text val : values) {
                String[] entry = val.toString().split(",");
                String MBR = entry[1];
                accu += entry[0] + ",MBR" + MBR + " ";
            }
            result.set(accu);
            context.getCounter(FileCounters.COUNT).increment(1);
            mos.write(key, result, generateFileName(key));
        }
    }

    public static class RTreeMapper extends Mapper<Object, Text, IntWritable, Text> {
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            System.out.println("WOWOWOWOW RUNNING"); // NOTHING SHOWS UP!
        }
    }

    public static class RTreeReducer extends Reducer<IntWritable, Text, IntWritable, Text> {
        private MultipleOutputs<IntWritable, Text> mos;
        Text t = new Text();

        public void setup(Context context) {
            mos = new MultipleOutputs<IntWritable, Text>(context);
        }

        public void reduce(IntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            t.set("dsfs");
            mos.write(key, t, "WOWOWOWOWOW" + key.get());
            // ALSO, NOTHING IS WRITTEN TO THE FILE!!!!!
        }
    }

    public static class RTreeInputFormat extends TextInputFormat {
        protected boolean isSplitable(FileSystem fs, Path file) {
            return false;
        }
    }

    public static void main(String[] args) throws Exception {
        if (args.length != 2) {
            System.err.println("Enter valid number of arguments <Inputdirectory> <Outputlocation>");
            System.exit(0);
        }
        ToolRunner.run(new Configuration(), new Simpletask(), args);
    }

    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "Job1");
        job.setJarByClass(Simpletask.class);
        job.setMapperClass(TokenizerMapper.class);
        job.setCombinerClass(IntSumReducer.class);
        job.setReducerClass(IntSumReducer.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);
        boolean complete = job.waitForCompletion(true);

        //================ RTree Loop ============
        int capacity = 3;
        Configuration rconf = new Configuration();
        Job rtreejob = Job.getInstance(rconf, "rtree");
        if (complete) {
            int count = (int) job.getCounters().findCounter(FileCounters.COUNT).getValue();
            System.out.println("File count: " + count);
            String path = null;
            for (int i = 0; i < count; i++) {
                path = "/Worker/p" + i + "-m-00000";
                System.out.println("Add input path: " + path);
                FileInputFormat.addInputPath(rtreejob, new Path(path));
            }
            System.out.println("Input path done.");
            FileOutputFormat.setOutputPath(rtreejob, new Path("/RTree"));
            rtreejob.setJarByClass(Simpletask.class);
            rtreejob.setMapperClass(RTreeMapper.class);
            rtreejob.setCombinerClass(RTreeReducer.class);
            rtreejob.setReducerClass(RTreeReducer.class);
            rtreejob.setOutputKeyClass(IntWritable.class);
            rtreejob.setOutputValueClass(Text.class);
            rtreejob.setInputFormatClass(RTreeInputFormat.class);
            complete = rtreejob.waitForCompletion(true);
        }
        return 0;
    }
}

For a MapReduce job, the output directory must not already exist. The framework checks for the output directory first, and if it exists, the job fails. In your case, you specified the same output directory for both jobs. I modified your code: I changed args[1] to args[2] for job2, so the third argument is now the output directory of the second job. Pass a third argument as well.

    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Job1");
    job.setJarByClass(Simpletask.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(Text.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    //AND THEN I WAIT THIS JOB TO COMPLETE.
    boolean complete = job.waitForCompletion(true);

    //I START A NEW JOB, BUT WHY IS IT NOT RUNNING?
    Configuration conf = new Configuration();
    Job job2 = Job.getInstance(conf, "Job2");
    job2.setJarByClass(Simpletask.class);
    job2.setMapperClass(TokenizerMapper.class);
    job2.setCombinerClass(IntSumReducer.class);
    job2.setReducerClass(IntSumReducer.class);
    job2.setOutputKeyClass(IntWritable.class);
    job2.setOutputValueClass(Text.class);
    FileInputFormat.addInputPath(job2, new Path(args[0]));
    FileOutputFormat.setOutputPath(job2, new Path(args[2]));
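    
If you want reruns to be idempotent instead, you can also delete a leftover output directory before submitting. A minimal sketch, assuming job2's output goes to args[2] as above and that FileSystem and Path from org.apache.hadoop.fs are imported:

    // Remove job2's output directory if a previous run left it behind,
    // so the pre-submit existence check does not fail the job.
    Path out2 = new Path(args[2]);
    FileSystem fs = FileSystem.get(conf);
    if (fs.exists(out2)) {
        fs.delete(out2, true); // true = delete recursively
    }
    FileOutputFormat.setOutputPath(job2, out2);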

A few possible causes of errors:

  1. conf is declared twice (doesn't that give a compile error?)
  2. The output path of job2 already exists, since it was created by job1 (+1 to Amal G Jose's answer).
  3. I think you should also call job.setMapOutputKeyClass(IntWritable.class) and job.setMapOutputValueClass(Text.class) for both jobs, matching the <IntWritable, Text> pairs the mappers emit (see the sketch after this list).
  4. Do you also have a command to execute job2 after the code snippet that you posted? That is, do you actually call job2.waitForCompletion(true), or something similar to that?
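
For points 3 and 4, a minimal sketch of what the end of the job2 setup could look like; this is illustrative only, with the map output types matching the <IntWritable, Text> emitted by TokenizerMapper in the question:

    // Declare the map output types explicitly (point 3).
    job2.setMapOutputKeyClass(IntWritable.class);
    job2.setMapOutputValueClass(Text.class);
    // Actually submit job2 and block until it finishes (point 4).
    boolean job2Complete = job2.waitForCompletion(true);
    System.exit(job2Complete ? 0 : 1);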

Overall: check the logs for error messages, which should clearly explain what went wrong.
