Я новичок в программировании ООП и MapReduce. Я написал программу для расчёта среднего значения. Я подготовил JAR-файл в /home/cloudera. Я создал папки для входных и выходных данных программы: /home/cloudera/StockPrediction/input/ и /home/cloudera/StockPrediction/output. Мой код:
package mainpackage;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.DoubleWritable;
//import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import supportpackage.DoubleArrayWritable;
//import supportpackage.IntArrayWritable;
/**
 * Job "Average": reads CSV lines of the form
 * {@code company,p1,p2,p3,...} where prices come in groups of
 * {@link #WEEKDAYS} (one trading week per group), and emits per company
 * one average price per week.
 *
 * <p>Launch (note that the fully-qualified main class MUST be given on the
 * command line unless the jar manifest declares it — omitting it makes
 * Hadoop's RunJar treat the first argument as the class name and fail with
 * {@code ClassNotFoundException}):
 * <pre>hadoop jar average.jar mainpackage.MainFile &lt;input&gt; &lt;output&gt;</pre>
 */
public class MainFile {
    /** Number of daily prices that make up one week (one averaging group). */
    private static final int WEEKDAYS = 5;
    // NOTE: the original code shared a static field (avgTokensLength) between
    // mapper and reducer. That cannot work: on a cluster the mapper and the
    // reducer run in separate JVMs, so the reducer always saw 0 and allocated
    // a zero-length result array. The reducer below sizes its output
    // dynamically instead, so no cross-task state is needed.

    /*******************************************************************************************************/
    // Job 1: Average Mapper class.
    // Input:  one CSV line "company,p1,p2,..." per record.
    // Output: (company, DoubleArrayWritable of WEEKDAYS prices) — one record
    //         per complete week found on the line; a trailing partial week
    //         (fewer than WEEKDAYS prices) is dropped, as in the original.
    public static class avgMapper extends MapReduceBase
            implements Mapper<LongWritable, Text, Text, DoubleArrayWritable> {
        @Override
        public void map(LongWritable key, Text value,
                OutputCollector<Text, DoubleArrayWritable> output, Reporter reporter)
                throws IOException {
            StringTokenizer tokens = new StringTokenizer(value.toString(), ",");
            if (!tokens.hasMoreTokens()) {
                return; // blank line — nothing to emit
            }
            String company = tokens.nextToken();
            DoubleWritable[] week = new DoubleWritable[WEEKDAYS];
            int filled = 0;
            while (tokens.hasMoreTokens()) {
                week[filled++] = new DoubleWritable(Double.parseDouble(tokens.nextToken()));
                if (filled == WEEKDAYS) {
                    output.collect(new Text(company), new DoubleArrayWritable(week));
                    // Hand the array off and start a fresh one; the original
                    // reused and overwrote the same array it had just emitted.
                    week = new DoubleWritable[WEEKDAYS];
                    filled = 0;
                }
            }
        }
    }

    /*************************************************************************************************/
    // Job 1: Average Reducer class.
    // For each company, every incoming value is one week of prices; the
    // reducer emits a single DoubleArrayWritable holding one average per week.
    public static class avgReduce extends MapReduceBase
            implements Reducer<Text, DoubleArrayWritable, Text, DoubleArrayWritable> {
        @Override
        public void reduce(Text key, Iterator<DoubleArrayWritable> values,
                OutputCollector<Text, DoubleArrayWritable> output, Reporter reporter)
                throws IOException {
            // Sized dynamically — the number of weeks is simply the number of
            // values received, so no mapper-side bookkeeping is required.
            List<DoubleWritable> weeklyAverages = new ArrayList<DoubleWritable>();
            while (values.hasNext()) {
                double sum = 0.0;
                int count = 0;
                for (Writable w : values.next().get()) {
                    sum += ((DoubleWritable) w).get();
                    count++;
                }
                if (count > 0) { // guard against an empty array value
                    weeklyAverages.add(new DoubleWritable(sum / count));
                }
            }
            output.collect(key, new DoubleArrayWritable(
                    weeklyAverages.toArray(new DoubleWritable[weeklyAverages.size()])));
        }
    }

    /**
     * Configures and runs the Average job.
     *
     * @param args {@code args[0]} = input path, {@code args[1]} = output path
     *             (the output directory must not already exist)
     * @throws Exception if job submission or execution fails
     */
    public static void main(String args[]) throws Exception {
        if (args.length < 2) {
            // Fail fast with a usage hint instead of an opaque
            // ArrayIndexOutOfBoundsException from args[0]/args[1].
            System.err.println(
                    "Usage: hadoop jar average.jar mainpackage.MainFile <input> <output>");
            System.exit(2);
        }
        JobConf average = new JobConf(MainFile.class);
        /***********************************************************/
        // JOB 1:
        average.setJobName("Average");
        average.setMapOutputKeyClass(Text.class);
        average.setMapOutputValueClass(DoubleArrayWritable.class);
        average.setMapperClass(avgMapper.class);
        average.setReducerClass(avgReduce.class);
        average.setInputFormat(TextInputFormat.class);
        average.setOutputFormat(TextOutputFormat.class);
        FileInputFormat.setInputPaths(average, new Path(args[0]));
        FileOutputFormat.setOutputPath(average, new Path(args[1]));
        JobClient.runJob(average);
    }
}
Я создал файл JAR и пытаюсь запустить с помощью этой команды:
hadoop jar /home/cloudera/average.jar /home/cloudera/StockPrediction/input/qualcomm.csv /home/cloudera/StockPrediction/output/qualcomm
Я получаю ошибку, приведённую ниже. Может ли кто-нибудь мне помочь?
Exception in thread "main" java.lang.ClassNotFoundException: /home/cloudera/StockPrediction/input/qualcomm/csv
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:270)
at org.apache.hadoop.util.RunJar.run(RunJar.java:214)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)