代码放到MyMatrix这个文件夹下的MartrixMultiply.java文件里面。程序的编译和打包成jar包一切顺利,但是当我在hadoop下运行jar包时出了问题。我输入了以下linux命令:hadoop jar MartrixMultiply.jar MyMatrix/MartrixMultiply 但是给我报出了如下错误:Exception in thread "main" java.lang.ClassNotFoundException: MyMatrix.MartrixMultiply。我也不太懂java,搜了很多资料也没找到解决的办法,请问我是哪里错了
package MyMatrix;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.MultipleInputs;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
 * MapReduce job that multiplies two matrices stored in HDFS as one
 * "row,col,value" triple per line (matrix A in a.txt, matrix B in b.txt;
 * indices are 1-based).
 *
 * Dimensions are fixed by the constants below: A is ROW_COUNT x BROW_ACOL
 * and B is BROW_ACOL x COL_COUNT, so the product is ROW_COUNT x COL_COUNT.
 * Each mapper replicates its elements to every result cell they contribute
 * to; the reducer dot-products the matched row of A with the column of B.
 */
public class MartrixMultiply{
    /** Number of columns of the result matrix (= columns of B). */
    public static final int COL_COUNT = 2;
    /** Number of rows of the result matrix (= rows of A). */
    public static final int ROW_COUNT = 4;
    /** Columns of A == rows of B (the shared inner dimension). */
    public static final int BROW_ACOL = 3;

    /**
     * Mapper for matrix A. Element a[row][col] contributes to every result
     * cell (row, j), so it is emitted once per result column with key
     * "row,j" and value "a,col,value".
     */
    public static class MartrixMaperA extends Mapper<LongWritable, Text, Text, Text>{
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Input line format: "row,col,value" (1-based indices).
            String[] items = value.toString().split(",");
            int rowIndex = Integer.parseInt(items[0]);
            int colIndex = Integer.parseInt(items[1]);
            int valueInt = Integer.parseInt(items[2]);
            for (int i = 0; i < COL_COUNT; i++) {
                Text outKey = new Text(rowIndex + "," + (i+1));
                Text outValue = new Text("a,"+colIndex+","+valueInt);
                context.write(outKey, outValue);
            }
        }
    }

    /**
     * Mapper for matrix B. Element b[row][col] contributes to every result
     * cell (i, col), so it is emitted once per result row with key
     * "i,col" and value "b,row,value".
     */
    public static class MartrixMaperB extends Mapper<LongWritable, Text, Text, Text>{
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Input line format: "row,col,value" (1-based indices).
            String[] items = value.toString().split(",");
            int rowIndex = Integer.parseInt(items[0]);
            int colIndex = Integer.parseInt(items[1]);
            int valueInt = Integer.parseInt(items[2]);
            for (int i = 0; i < ROW_COUNT; i++) {
                Text outKey = new Text((i+1) + "," + colIndex);
                Text outValue = new Text("b,"+rowIndex+","+valueInt);
                context.write(outKey, outValue);
            }
        }
    }

    /**
     * Reducer for one result cell "row,col": collects the row of A and the
     * column of B (tagged "a"/"b", indexed by the shared inner dimension)
     * and emits their dot product.
     */
    public static class MartrixReducer extends Reducer<Text,Text,Text,IntWritable>{
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            // valueA[k] = a[row][k+1], valueB[k] = b[k+1][col]; absent
            // entries stay 0 and contribute nothing to the sum.
            int[] valueA = new int[BROW_ACOL];
            int[] valueB = new int[BROW_ACOL];
            for (Text value : values) {
                // Value format: "a|b,innerIndex,value".
                String[] items = value.toString().split(",");
                if (items[0].equals("a")) {
                    valueA[Integer.parseInt(items[1])-1] = Integer.parseInt(items[2]);
                } else if (items[0].equals("b")) {
                    valueB[Integer.parseInt(items[1])-1] = Integer.parseInt(items[2]);
                }
            }
            int result = 0;
            for (int i = 0; i < BROW_ACOL; i++) {
                result += valueA[i] * valueB[i];
            }
            context.write(key, new IntWritable(result));
        }
    }

    /**
     * Configures and submits the job: reads A and B from fixed HDFS paths,
     * writes the product matrix to /Martrix/out. Exits 0 on success, 1 on
     * failure. Note the output directory must not already exist.
     */
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Path pathA = new Path("hdfs://localhost:9000/Martrix/a.txt");
        Path pathB = new Path("hdfs://localhost:9000/Martrix/b.txt");
        Path pathOut = new Path("hdfs://localhost:9000/Martrix/out");
        Configuration conf = new Configuration();
        // Job.getInstance replaces the deprecated Job(Configuration, String)
        // constructor, so the @SuppressWarnings("deprecation") is no longer needed.
        Job job = Job.getInstance(conf, "MartrixMultiply");
        job.setJarByClass(MartrixMultiply.class);
        // Route each input file to its own tagged mapper.
        MultipleInputs.addInputPath(job, pathA, TextInputFormat.class, MartrixMaperA.class);
        MultipleInputs.addInputPath(job, pathB, TextInputFormat.class, MartrixMaperB.class);
        job.setReducerClass(MartrixReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileOutputFormat.setOutputPath(job, pathOut);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}