hadoop 报java.lang.InstantiationException

package mapreduce;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.partition.HashPartitioner;

/**
 * Classic word-count MapReduce job: reads text from {@code INPUT_PATH},
 * splits each line on tabs, and writes {@code word\tcount} pairs to
 * {@code OUTPUT_PATH}.
 */
public class WordCountApp {
    // HDFS locations, hard-coded for this example cluster.
    static final String INPUT_PATH = "hdfs://chaoren:9000/hello";
    static final String OUTPUT_PATH = "hdfs://chaoren:9000/hello_statics";

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Job.getInstance(...) is the supported factory; new Job(conf, name)
        // is deprecated in the mapreduce (v2) API.
        Job job = Job.getInstance(conf, WordCountApp.class.getSimpleName());

        // 1.1 input directory and input format (line offset -> line text)
        FileInputFormat.setInputPaths(job, INPUT_PATH);
        job.setInputFormatClass(TextInputFormat.class);

        // 1.2 custom mapper and its intermediate key/value types
        job.setMapperClass(MyMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);

        // 1.3 partitioning (defaults shown explicitly)
        job.setPartitionerClass(HashPartitioner.class);
        job.setNumReduceTasks(1);

        // 1.4 TODO sort / group
        // 1.5 TODO <optional> combiner

        // 2.1 custom reducer and final output key/value types
        job.setReducerClass(MyReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

        // 2.2 output path (must not already exist on HDFS)
        FileOutputFormat.setOutputPath(job, new Path(OUTPUT_PATH));

        // 2.3 output format.
        // BUG FIX: the original passed FileOutputFormat.class here.
        // FileOutputFormat is abstract, so the framework's reflective
        // newInstance() call throws java.lang.InstantiationException at
        // runtime. Use the concrete TextOutputFormat instead.
        job.setOutputFormatClass(TextOutputFormat.class);

        // Submit to the cluster, wait, and propagate success/failure
        // through the process exit code.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }

    /**
     * KEYIN    (k1) byte offset of the line within the file
     * VALUEIN  (v1) text content of the line
     * KEYOUT   (k2) a single word from the line
     * VALUEOUT (v2) the count 1 for each occurrence
     */
    static class MyMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Input lines are expected to be tab-separated words.
            String[] words = value.toString().split("\t");
            for (String word : words) {
                context.write(new Text(word), new LongWritable(1));
            }
        }
    }

    /**
     * KEYIN    (k2) a word
     * VALUEIN  (v2) the per-occurrence counts emitted by the mapper
     * KEYOUT   (k3) each distinct word in the input
     * VALUEOUT (v3) the total number of occurrences of that word
     */
    static class MyReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
        @Override
        protected void reduce(Text k2, Iterable<LongWritable> v2s, Context context)
                throws IOException, InterruptedException {
            long sum = 0L;
            for (LongWritable v2 : v2s) {
                sum += v2.get();
            }
            context.write(k2, new LongWritable(sum));
        }
    }
}

Csdn user default icon
上传中...
上传图片
插入图片
抄袭、复制答案,以达到刷声望分或其他目的的行为,在CSDN问答是严格禁止的,一经发现立刻封号。是时候展现真正的技术了!
立即提问