簡體   English   中英

java.lang.ClassNotFoundException hadoop

[英]java.lang.ClassNotFoundException hadoop

我是 hadoop 和 mapreduce 編程的新手。 我寫了一個程序來計算平均值。 我在 /home/cloudera 准備了一個 jar 文件。 我在 /home/cloudera/StockPrediction/input/ 和 /home/cloudera/StockPrediction/output 中創建了一個文件夾來為程序提供輸入和輸出。 我的代碼是:

package mainpackage;


import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.DoubleWritable;
//import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;

import supportpackage.DoubleArrayWritable;
//import supportpackage.IntArrayWritable;


public class MainFile {
    /** Number of trading days per week; each emitted array holds one week of prices. */
    private static final int WEEKDAYS = 5;

    /*******************************************************************************************************/
    // Job 1: Average Mapper class.
    //
    // Input lines look like "COMPANY,p1,p2,p3,..." — a company name followed by
    // comma-separated prices. The mapper groups the prices into chunks of WEEKDAYS
    // and emits one DoubleArrayWritable per full week, keyed by company. Trailing
    // prices that do not fill a whole week are dropped (same as the original code).
    public static class avgMapper extends MapReduceBase
            implements Mapper<LongWritable, Text, Text, DoubleArrayWritable> {
        // Value type fixed to DoubleArrayWritable to match the job configuration
        // (setMapOutputValueClass) and the reducer's declared input type.

        @Override
        public void map(LongWritable key, Text value,
                OutputCollector<Text, DoubleArrayWritable> output, Reporter reporter)
                throws IOException {
            StringTokenizer tokens = new StringTokenizer(value.toString(), ",");
            String company = tokens.nextToken();

            DoubleWritable[] prices = new DoubleWritable[WEEKDAYS];
            int index = 0;
            while (tokens.hasMoreTokens()) {
                prices[index++] = new DoubleWritable(Double.parseDouble(tokens.nextToken()));
                if (index == WEEKDAYS) {
                    output.collect(new Text(company), new DoubleArrayWritable(prices));
                    // Allocate a fresh array for the next week so a value handed to
                    // the collector is never mutated afterwards.
                    prices = new DoubleWritable[WEEKDAYS];
                    index = 0;
                }
            }
        }
    }

    /*************************************************************************************************/
    // Job 1: Average Reducer class.
    //
    // For each company, receives the per-week price arrays emitted by the mapper
    // and outputs one array containing the average price of each week.
    //
    // Bug fix: the original sized its result array from a static field
    // (avgTokensLength) written by the mapper. Mappers and reducers run in separate
    // JVMs on a real cluster, so that field is always 0 in the reducer — the result
    // array was zero-length and reduce() crashed with ArrayIndexOutOfBoundsException.
    // A growable list removes the cross-task static dependency entirely (and the
    // mutable static newIndex counter with it).
    public static class avgReduce extends MapReduceBase
            implements Reducer<Text, DoubleArrayWritable, Text, DoubleArrayWritable> {

        @Override
        public void reduce(Text key, Iterator<DoubleArrayWritable> values,
                OutputCollector<Text, DoubleArrayWritable> output, Reporter reporter)
                throws IOException {
            List<DoubleWritable> weeklyAverages = new ArrayList<DoubleWritable>();

            while (values.hasNext()) {
                Writable[] week = values.next().get();
                double sum = 0.0;
                for (Writable price : week) {
                    sum += ((DoubleWritable) price).get();
                }
                // Each value holds exactly WEEKDAYS prices (see avgMapper), so
                // dividing by week.length matches the original "/ WEEKDAYS".
                weeklyAverages.add(new DoubleWritable(sum / week.length));
            }

            output.collect(key,
                    new DoubleArrayWritable(weeklyAverages.toArray(new DoubleWritable[0])));
        }
    }

    /**
     * Configures and runs the "Average" job.
     *
     * @param args args[0] = input path, args[1] = output path (must not already exist)
     * @throws Exception if the job fails
     */
    public static void main(String[] args) throws Exception {
        JobConf average = new JobConf(MainFile.class);
        /***********************************************************/
        // JOB 1:
        average.setJobName("Average");

        average.setMapperClass(avgMapper.class);
        average.setReducerClass(avgReduce.class);

        average.setMapOutputKeyClass(Text.class);
        average.setMapOutputValueClass(DoubleArrayWritable.class);
        // Bug fix: the final output classes default to LongWritable/Text, which does
        // not match what avgReduce emits — the reduce-side collector rejects the
        // records at runtime with a "wrong key/value class" IOException unless the
        // output classes are declared explicitly.
        average.setOutputKeyClass(Text.class);
        average.setOutputValueClass(DoubleArrayWritable.class);

        average.setInputFormat(TextInputFormat.class);
        average.setOutputFormat(TextOutputFormat.class);

        FileInputFormat.setInputPaths(average, new Path(args[0]));
        FileOutputFormat.setOutputPath(average, new Path(args[1]));

        JobClient.runJob(average);
    }
}

我創建了一個 jar 文件並嘗試使用以下命令運行:

hadoop jar /home/cloudera/average.jar /home/cloudera/StockPrediction/input/qualcomm.csv /home/cloudera/StockPrediction/output/qualcomm

我收到此錯誤。 誰能幫我嗎。

Exception in thread "main" java.lang.ClassNotFoundException: /home/cloudera/StockPrediction/input/qualcomm/csv
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:270)
    at org.apache.hadoop.util.RunJar.run(RunJar.java:214)
    at org.apache.hadoop.util.RunJar.main(RunJar.java:136)


您尚未在 JAR 名稱后指定主類
試試這個命令:
hadoop jar /home/cloudera/average.jar mainpackage.MainFile /home/cloudera/StockPrediction/input/qualcomm.csv /home/cloudera/StockPrediction/output/qualcomm

暫無
暫無

聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.

 
粵ICP備18138465號  © 2020-2024 STACKOOM.COM