my_nwpu 2017-03-27 14:58

java.lang.ClassCastException

public class ipSort {
    public static class Map extends Mapper {
        // Turn each input line into an <ipNum, ipAdd> pair
        private final static IntWritable ipNum = new IntWritable();
        private Text ipAdd = new Text();
        public void map(LongWritable key, IntWritable value, Context context)
                throws IOException, InterruptedException {
            // Convert the line to a string
            String line = value.toString();
            // Split the line on whitespace
            StringTokenizer token = new StringTokenizer(line);
            // Process every field of the line
            while (token.hasMoreElements()) {
                // split again on whitespace
                StringTokenizer tokenLine = new StringTokenizer(token.nextToken());
                ipAdd.set(token.nextToken().trim());
                ipNum.set(Integer.valueOf(token.nextToken().trim()));
                context.write(ipNum, new Text(ipAdd));
            }
        }
    }
    public static class Reduce extends Reducer<IntWritable, Text, Text, IntWritable>{
        // Swap the key and value produced by the map stage
        private Text result = new Text();
        public void reduce(IntWritable key,Iterable<Text> values, Context context) 
                throws IOException, InterruptedException{
            for(Text val : values){
                result.set(val.toString());
                context.write(new Text(result),key);
            }
        }
    }
    public static class IntKeyDescComparator extends WritableComparator{
        protected IntKeyDescComparator(){
            super(IntWritable.class,true);
        }
        public int compare(WritableComparable a, WritableComparable b){
            return super.compare(a, b);
        }
    }
    public static void main(String args[]) 
            throws IOException, ClassNotFoundException, InterruptedException{
        System.setProperty("hadoop.home.dir", "C:\\Users\\lenovo\\Desktop\\hadoop-2.6.0\\hadoop-2.6.0");
        Configuration conf = new Configuration();
        conf.set("mapred.job.tracker",  "192.168.142.138");

        Job job = new Job(conf,"ipSort");
        job.setJarByClass(ipSort.class);
        job.setSortComparatorClass(IntKeyDescComparator.class);

        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);

        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.addInputPath(job, new Path("hdfs://10.170.54.193:9000/input"));
        FileOutputFormat.setOutputPath(job, new Path("hdfs://10.170.54.193:9000/output"));
        System.exit(job.waitForCompletion(true)?0:1);

    }
}

At runtime it fails with: Caused by: java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to org.apache.hadoop.io.IntWritable, but I can't find where the bad type conversion happens.
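A likely cause, judging from the stack trace: with the default TextInputFormat, the framework hands the Mapper <LongWritable, Text> pairs, but the map method above declares its value parameter as IntWritable. That signature never overrides Mapper.map(), so the built-in identity mapper runs instead, and Text values flow into stages that expect IntWritable keys. Below is a minimal sketch of a fix, not a confirmed solution, assuming each input line holds an IP address followed by a count; the setMapOutputKeyClass/setMapOutputValueClass calls are needed because the map output types differ from the reduce output types.

    import java.io.IOException;
    import java.util.StringTokenizer;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    public static class Map extends Mapper<LongWritable, Text, IntWritable, Text> {
        private final static IntWritable ipNum = new IntWritable();
        private Text ipAdd = new Text();
        @Override // with the corrected signature, the compiler verifies the override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            StringTokenizer token = new StringTokenizer(value.toString());
            while (token.hasMoreTokens()) {
                ipAdd.set(token.nextToken().trim());                   // assumed field 1: IP address
                ipNum.set(Integer.parseInt(token.nextToken().trim())); // assumed field 2: count
                context.write(ipNum, ipAdd);
            }
        }
    }

    // In main(), declare the map output types explicitly, since they
    // differ from the job's final (reduce) output types:
    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(Text.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

Two smaller issues worth checking as well: the original while loop calls token.nextToken() three times per iteration, which silently skips a field on each pass, and IntKeyDescComparator returns super.compare(a, b), which sorts ascending; negate that result if a true descending order is intended.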