my_nwpu 2017-03-27 14:58

java.lang.ClassCastException

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class ipSort {
    public static class Map extends Mapper<LongWritable, IntWritable, IntWritable, Text> {
        // turn each input line into the form <ipNum, ipAdd>
        private final static IntWritable ipNum = new IntWritable();
        private Text ipAdd = new Text();
        public void map(LongWritable key, IntWritable value, Context context)
                throws IOException, InterruptedException {
            // convert the line into a string
            String line = value.toString();
            // split the line on whitespace and handle it token by token
            StringTokenizer token = new StringTokenizer(line);
            while (token.hasMoreElements()) {
                // fields are separated by blanks
                StringTokenizer tokenLine = new StringTokenizer(token.nextToken());
                ipAdd.set(token.nextToken().trim());
                ipNum.set(Integer.valueOf(token.nextToken().trim()));
                context.write(ipNum, new Text(ipAdd));
            }
        }
    }
    public static class Reduce extends Reducer<IntWritable, Text, Text, IntWritable>{
        // invert the <key, value> pairs produced by the map stage
        private Text result = new Text();
        public void reduce(IntWritable key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException{
            for(Text val : values){
                result.set(val.toString());
                context.write(new Text(result),key);
            }
        }
    }
    public static class IntKeyDescComparator extends WritableComparator{
        protected IntKeyDescComparator(){
            super(IntWritable.class,true);
        }
        public int compare(WritableComparable a, WritableComparable b) {
            return super.compare(a, b);
        }
    }
    public static void main(String args[]) 
            throws IOException, ClassNotFoundException, InterruptedException{
        System.setProperty("hadoop.home.dir", "C:\\Users\\lenovo\\Desktop\\hadoop-2.6.0\\hadoop-2.6.0");
        Configuration conf = new Configuration();
        conf.set("mapred.job.tracker", "192.168.142.138");

        Job job = new Job(conf, "ipSort");
        job.setJarByClass(ipSort.class);
        job.setSortComparatorClass(IntKeyDescComparator.class);

        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);

        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.addInputPath(job, new Path("hdfs://10.170.54.193:9000/input"));
        FileOutputFormat.setOutputPath(job, new Path("hdfs://10.170.54.193:9000/output"));
        System.exit(job.waitForCompletion(true)?0:1);

    }
}

At runtime the job fails with "Caused by: java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to org.apache.hadoop.io.IntWritable", but I cannot find where the bad type conversion happens.
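The cast most likely happens inside the Hadoop framework rather than in the job code: with the default TextInputFormat, the map input value is always a Text, so a map() declared to take an IntWritable value forces the framework to cast Text to IntWritable when it invokes the method. Below is a minimal sketch of the likely fix; the corrected generic parameters and the setMapOutputKeyClass/setMapOutputValueClass calls are reconstructions rather than code from the original post, and it assumes each input line is a whitespace-separated ipAdd followed by ipNum.

    // Hypothetical fix: accept Text as the map input value and parse it,
    // instead of declaring the value parameter as IntWritable.
    public static class Map extends Mapper<LongWritable, Text, IntWritable, Text> {
        private final static IntWritable ipNum = new IntWritable();
        private final Text ipAdd = new Text();

        @Override  // now actually overrides Mapper.map, so the framework calls it
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            StringTokenizer token = new StringTokenizer(value.toString());
            // expect tokens in pairs: an address followed by its count
            while (token.hasMoreTokens()) {
                ipAdd.set(token.nextToken().trim());
                ipNum.set(Integer.parseInt(token.nextToken().trim()));
                context.write(ipNum, ipAdd);
            }
        }
    }

Because the map output types (IntWritable, Text) differ from the reduce output types (Text, IntWritable), the driver also has to declare both pairs explicitly; setOutputKeyClass/setOutputValueClass alone describe only the reducer's output:

        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);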