这是我的 Java 代码，用于 MapReduce 程序打包，置于 Linux 里运行。
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import java.io.IOException;
/**
 * Classic Hadoop word-count job driver.
 *
 * <p>Reads text from {@code args[0]}, tokenizes each line on whitespace, and
 * writes {@code word -> occurrence count} pairs to {@code args[1]}.
 *
 * <p>NOTE(review): the {@code NoClassDefFoundError: org/apache/hadoop/conf/Configuration}
 * in the report is a launch-time classpath problem, not a code problem — the jar
 * must be run with {@code hadoop jar wordcount.jar WordCountAPP ...} (which puts
 * the Hadoop libraries on the classpath), not with plain {@code java -jar}.
 */
public class WordCountAPP {
    /**
     * Configures and submits the MapReduce job, blocking until it finishes.
     *
     * @param args {@code args[0]} = HDFS input path, {@code args[1]} = HDFS output
     *             path (must not already exist). Defaults are used when empty.
     * @throws IOException            on HDFS / job-submission I/O failure
     * @throws ClassNotFoundException if the mapper/reducer classes cannot be loaded
     * @throws InterruptedException   if the wait for completion is interrupted
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        if (args.length == 0) {
            // Convenient defaults for ad-hoc runs without CLI arguments.
            args = new String[]{"/wordcount.txt", "/wordcount-result"};
        }
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, WordCountAPP.class.getSimpleName());
        // Tells Hadoop which jar to ship to the cluster (located via this class).
        job.setJarByClass(WordCountAPP.class);
        // Where the input data comes from.
        FileInputFormat.setInputPaths(job, args[0]);
        // Which mapper processes the input records.
        job.setMapperClass(WordCountMap.class);
        // Key/value types emitted by the mapper.
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);
        // Which reducer aggregates the mapper output.
        job.setReducerClass(WordCountReduce.class);
        // Key/value types of the final job output.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        // Where the results are written (directory must not exist yet).
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        // Submit to YARN and block until done; propagate failure via exit code
        // (the original discarded the result and always exited 0).
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    /**
     * Emits {@code (word, 1)} for every whitespace-separated token of a line.
     *
     * <p>Fix: the original extended the raw {@code Mapper} type, so this method
     * was an overload — not an override — of the framework's
     * {@code map(Object, Object, Context)} and the identity mapper would run
     * (and {@code @Override} would not compile). Type parameters restore the
     * intended contract: input {@code (LongWritable offset, Text line)},
     * output {@code (Text word, LongWritable count)}.
     */
    public static class WordCountMap extends Mapper<LongWritable, Text, Text, LongWritable> {
        // Reusable writables — the standard Hadoop idiom to avoid allocating
        // two objects per token.
        private static final LongWritable ONE = new LongWritable(1);
        private final Text word = new Text();

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // \\s+ collapses runs of whitespace; split(" ") produced empty
            // tokens on consecutive spaces.
            for (String token : value.toString().split("\\s+")) {
                if (!token.isEmpty()) {
                    word.set(token);
                    context.write(word, ONE);
                }
            }
        }
    }

    /**
     * Sums the per-word counts produced by {@link WordCountMap}.
     *
     * <p>Same raw-type fix as the mapper: without type parameters this method
     * never overrode the framework's reduce and the identity reducer ran.
     */
    public static class WordCountReduce extends Reducer<Text, LongWritable, Text, LongWritable> {
        private final LongWritable total = new LongWritable();

        @Override
        public void reduce(Text key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException {
            long count = 0L;
            for (LongWritable v : values) {
                count += v.get();
            }
            total.set(count);
            context.write(key, total);
        }
    }
}
这是运行后报的错
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/hadoop/conf/Configuration
at WordCountAPP.main(WordCountAPP.java:17)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.conf.Configuration
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:349)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 1 more