hadoop中mapreduce的 Type mismatch in value from map错误
来源:互联网 发布:艾默生网络能源vertiv 编辑:程序博客网 时间:2024/05/16 02:53
还是dataguru第五周的视频练习,感谢老师的分享
// NOTE(review): this is the ORIGINAL, buggy listing from the article.
// It produces the "Type mismatch in value from map" error described in the title.
// The two defective lines are flagged with BUG comments below.
import java.io.IOException;
import java.io.InterruptedIOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class Test2 extends Configured implements Tool {

    enum Counter {
        LINESKIP, // counts malformed input lines that were skipped
    }

    // Mapper: splits each line on a single space and emits (field0, field1) as Text/Text.
    public static class Map extends Mapper<LongWritable, Text, Text, Text> {
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            try {
                String[] lineSplit = line.split(" ");
                String anum = lineSplit[0];
                String bnum = lineSplit[1];
                context.write(new Text(anum), new Text(bnum));
            } catch (ArrayIndexOutOfBoundsException e) {
                // Line had fewer than two fields: count it and skip it.
                context.getCounter(Counter.LINESKIP).increment(1);
                return;
            }
        }
    } // end map

    // Reducer: concatenates all values for a key, separated by '|'.
    public static class Reduce extends Reducer<Text, Text, Text, Text> {
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            String valueString;
            String out = "";
            for (Text value : values) {
                valueString = value.toString();
                out += valueString + "|";
            } // end for
            context.write(key, new Text(out));
        } // end reduce
    } // end Reduce

    public int run(String[] args) throws Exception {
        Configuration conf = getConf();
        Job job = new Job(conf, "Test2");
        job.setJarByClass(Test2.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.setMapperClass(Map.class);
        // BUG: Reducer.class is Hadoop's identity base class, not the custom
        // Reduce class above — should be job.setReducerClass(Reduce.class);
        job.setReducerClass(Reducer.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        // BUG: the map/reduce output key type is Text, not NullWritable; this
        // mismatch triggers the type-mismatch error in the article's title —
        // should be job.setOutputKeyClass(Text.class);
        job.setOutputKeyClass(NullWritable.class);
        job.setOutputValueClass(Text.class);
        job.waitForCompletion(true);
        return job.isSuccessful() ? 0 : 1;
    } // end run

    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new Configuration(), new Test2(), args);
        System.exit(res);
    } // end main
} // end Test2
1. 上面的代码会出现标题中的 Type mismatch 错误,需要把标记的地方改为:
job.setOutputKeyClass(Text.class);
import java.io.IOException;import java.io.InterruptedIOException;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.conf.Configured;import org.apache.hadoop.fs.Path;import org.apache.hadoop.io.*;import org.apache.hadoop.mapreduce.Mapper.Context;import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;import org.apache.hadoop.mapreduce.Job;import org.apache.hadoop.mapreduce.Mapper;import org.apache.hadoop.mapreduce.Reducer;import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;import org.apache.hadoop.util.Tool;import org.apache.hadoop.util.ToolRunner;public class Test2 extends Configured implements Tool {enum Counter {LINESKIP, // error line}public static class Map extendsMapper<LongWritable, Text, Text, Text> {public void map(LongWritable key, Text value, Context context)throws IOException, InterruptedException {String line = value.toString();try {String[] lineSplit = line.split(" ");String anum= lineSplit[0];String bnum = lineSplit[1];context.write(new Text(anum) ,new Text(bnum));} catch (ArrayIndexOutOfBoundsException e) {context.getCounter(Counter.LINESKIP).increment(1);//error counter add 1return;}}}// end mapimport java.io.InterruptedIOException;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.conf.Configured;import org.apache.hadoop.fs.Path;import org.apache.hadoop.io.*;import org.apache.hadoop.mapreduce.Mapper.Context;import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;import org.apache.hadoop.mapreduce.Job;import org.apache.hadoop.mapreduce.Mapper;import org.apache.hadoop.mapreduce.Reducer;import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;import org.apache.hadoop.util.Tool;import org.apache.hadoop.util.ToolRunner;public class Test2 extends Configured implements Tool {enum Counter {LINESKIP, // error line}public static class Map extendsMapper<LongWritable, Text, 
Text, Text> {public void map(LongWritable key, Text value, Context context)throws IOException, InterruptedException {String line = value.toString();try {String[] lineSplit = line.split(" ");String anum= lineSplit[0];String bnum = lineSplit[1];context.write(new Text(anum) ,new Text(bnum));} catch (ArrayIndexOutOfBoundsException e) {context.getCounter(Counter.LINESKIP).increment(1);//error counter add 1return;}}}// end mappublic static class Reduce extends Reducer<Text,Text,Text,Text>{public void reduce(Text key,Iterable<Text> values,Context context)throws IOException,InterruptedException{String valueString;String out="";for(Text value:values){valueString=value.toString();out+=valueString+"|";}//end forcontext.write(key, new Text(out));}//end reduce}//end Reducepublic int run(String[] args) throws Exception {Configuration conf = getConf();Job job = new Job(conf, "Test2");job.setJarByClass(Test2.class);FileInputFormat.addInputPath(job, new Path(args[0]));FileOutputFormat.setOutputPath(job, new Path(args[1]));job.setMapperClass(Map.class);job.setReducerClass(Reduce.class);job.setOutputFormatClass(TextOutputFormat.class);job.setOutputKeyClass(NullWritable.class);job.setOutputValueClass(Text.class);job.waitForCompletion(true);return job.isSuccessful() ? 0 : 1;}// end runpublic static void main(String[] args) throws Exception {int res = ToolRunner.run(new Configuration(), new Test2(), args);System.exit(res);}// end main}//end Test2
job.setMapperClass(Map.class);
job.setReducerClass(Reducer.class);
job.setOutputFormatClass(TextOutputFormat.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(Text.class);
job.waitForCompletion(true);
return job.isSuccessful() ? 0 : 1;}// end run  public static void main(String[] args) throws Exception {int res = ToolRunner.run(new Configuration(), new Test2(), args);System.exit(res);}// end main}//end Test2   但结果依然不正确,输出只是输入的倒序。后来把红色标记的那一行改为
job.setReducerClass(Reduce.class);
所以 setReducerClass() 里指定的类名必须与自定义的 Reduce 类名一致,否则会使用系统默认的 Reducer,输出就只是输入的倒序。
0 0
- hadoop中mapreduce的 Type mismatch in value from map错误
- hadoop问题Type mismatch in value from map解决方法
- hadoop问题Type mismatch in value from map解决方法
- hadoop-1.0.4 Type mismatch in value from map解决方法
- hadoop 之 MR程序报Type mismatch in value from map错
- Mapreduce Error: Type mismatch in key from map
- Mapreduce Error: Type mismatch in key from map
- IOException: Type mismatch in key from map的处理
- Type mismatch in key from map: expected org.apache.hadoop.io.LongWritable, received org.apache.hadoo
- 【Hadoop】Hadoop开发中wrong key/value与type mismatch错误
- Type mismatch in key from map: expected org.apache.hadoop.io.Text, received org.apache.hadoop.io.Lon
- java.io.IOException: Type mismatch in key from map: expected
- java.io.IOException: Type mismatch in key from map:解决方法
- java.io.IOException: Type mismatch in key from map: expected org.apache.hadoop.io.Text, recieved org
- junit Type mismatch: cannot convert from Test to Annotation | eclipse junit 的Test错误
- Type mismatch: cannot convert from CharFragment to Fragment错误
- Convert Value type to Map in Golang
- 由"Type mismatch: cannot convert from View to ProgressBar"联想到的类型转换错误的根源
- HDU-1030-DeltaWave
- POJ 1128
- C#格式化日期时间
- 翻转字符串(剑指offer42)
- 连续离散时间四种信号之间关系
- hadoop中mapreduce的 Type mismatch in value from map错误
- 最长子序列相关
- Algorithm学习笔记 --- Fibonacci polynomial(动态规划)
- Ajax 入门1:如何用Ajax建立一个简单的web应用程序(html + javascript + php)
- 数组 字典 复习
- 理解WebKit和Chromium: 调试Android系统上的Chromium
- 升级Ubuntu 12.04下的gcc到4.8
- 几个DSP高手的经验介绍(转)
- HDU-3328-Flipper(栈模拟)