八维 2017-09-28 08:00

How is the numPartitions parameter of getPartition obtained?

The job configuration never sets the number of reduce tasks (there is no NumReduceTask setting anywhere),
so how is the numPartitions argument of int getPartition(IntWritable key, IntWritable value, int numPartitions) determined?
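
My guess is that numPartitions comes from the job's reduce-task count, which would normally be set explicitly like below (the setNumReduceTasks(3) call is only an illustration, not something in my job), but I never call it anywhere:

    // Hypothetical: explicitly set the reduce-task count; the numPartitions value
    // passed to getPartition should then equal this number (3 is an arbitrary example).
    job.setNumReduceTasks(3);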

The complete example code is below:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import java.io.IOException;

/**
 * Created by dell on 2017/9/25.
 * @author w
 *
 */
public class MySort {

    static final String INPUT_PATH = "hdfs://hadoopwang0:9000/test";
    static final String OUT_PATH = "hdfs://hadoopwang0:9000/testout";
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {


        Configuration conf = new Configuration();
//        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
//        if (otherArgs.length != 2) {
//            System.err.println("Usage: wordcount <in> <out>");
//            System.exit(2);
//        }

        Job job = new Job(conf, "MySort");
        job.setJarByClass(MySort.class);
        job.setMapperClass(MyMap.class);
        job.setReducerClass(MyReduce.class);
        job.setPartitionerClass(MyPartition.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(INPUT_PATH));
        FileOutputFormat.setOutputPath(job, new Path(OUT_PATH));
        System.exit(job.waitForCompletion(true) ? 0:1);
    }

    //Map method: parse the input value into an IntWritable and emit it as the output key.
    public static class MyMap extends Mapper<Object, Text, IntWritable, IntWritable>{
        private static  IntWritable data = new IntWritable();

        @Override
        protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();

            data.set(Integer.parseInt(line));
            context.write(data, new IntWritable(1));
        }

    }

    //Reduce method: copy the input key into the output value, emitting the key once for
    //every element in its <value-list>; the global linenum tracks the key's output rank.
    public  static class MyReduce extends Reducer<IntWritable, IntWritable, IntWritable, IntWritable >{
        private static IntWritable linenum = new IntWritable(1);

        @Override
        protected void reduce(IntWritable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            System.out.println("Reducer:"+key);
            for (IntWritable val : values) {
                context.write(linenum, key);
                linenum = new IntWritable(linenum.get() + 1);
            }
        }
    }
    //Custom Partitioner: use the maximum input value and the number of partitions provided by
    //the MapReduce framework to compute size-based bucket boundaries, then return the partition
    //ID of the bucket the input key falls into.
    public static class MyPartition extends Partitioner<IntWritable, IntWritable>{
        @Override
        public int getPartition(IntWritable key, IntWritable value, int numPartitions) {
            int Maxnumber = 6522;
            int bound = Maxnumber / numPartitions + 1;
            int Keynumber = key.get();
            for (int i = 0; i < numPartitions; i++) {
                //Partition i covers keys in [bound * i, bound * (i + 1)).
                if (Keynumber >= bound * i && Keynumber < bound * (i + 1)) {
                    return i;
                }
            }
            return -1;
        }
    }


}
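
For what it is worth, here is a minimal check I could add to the driver before submitting the job (purely illustrative, and it assumes that numPartitions really does come from the job's reduce-task count):

    // Hypothetical check: print the reduce-task count the framework will use.
    // With no setNumReduceTasks call, this should show the configured default
    // ("mapreduce.job.reduces"), which is 1 unless overridden.
    // (With a count of 1, I believe the framework may not even invoke a custom partitioner.)
    System.out.println("numReduceTasks = " + job.getNumReduceTasks());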

1 answer

  • threenewbee 2017-09-28 15:57
