最近在看MapReduce,想起一直都是Copy 然后修改的方法来写。突然想试试自己动手写一个极其简单的mr程序。
细节决定成败啊,不试不知道,一试才能发现平时注意不到的细节。
下面是我用了很快时间写好的一个程序,注意,这份是有问题的!
- package wordcount;
-
- import java.io.IOException;
-
- import org.apache.commons.lang.StringUtils;
- import org.apache.hadoop.conf.Configuration;
- import org.apache.hadoop.conf.Configured;
- import org.apache.hadoop.fs.Path;
- import org.apache.hadoop.io.LongWritable;
- import org.apache.hadoop.io.Text;
- import org.apache.hadoop.mapreduce.Job;
- import org.apache.hadoop.mapreduce.Mapper;
- import org.apache.hadoop.mapreduce.Reducer;
- import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
- import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
- import org.apache.hadoop.util.Tool;
- import org.apache.hadoop.util.ToolRunner;
- import org.apache.log4j.Logger;
-
- public class MyWordCountJob extends Configured implements Tool {
- Logger log = Logger.getLogger(MyWordCountJob.class);
-
- public class MyWordCountMapper extends
- Mapper<LongWritable, Text, LongWritable, Text> {
- Logger log = Logger.getLogger(MyWordCountJob.class);
-
- LongWritable mapKey = new LongWritable();
- Text mapValue = new Text();
- @Override
- protected void map(LongWritable key, Text value, Context context)
- throws IOException, InterruptedException {
- mapKey.set(key.get());
- mapValue.set(value.toString());
- log.info("Mapper: mapKey--" + mapKey.get() + "mapValue --"+ mapValue.toString());
- context.write(mapKey, mapValue);
- }
-
- }
-
- public class MyWordCountReducer extends Reducer<LongWritable, Text, LongWritable, Text> {
-
- @Override
- protected void reduce(LongWritable key, Iterable<Text> values,Context context)
- throws IOException, InterruptedException {
- for(Text value :values)
- context.write(key, value);
- }
- }
-
- @Override
- public int run(String[] args) throws Exception {
- log.info("begin to run");
- Job job = Job.getInstance(getConf(), "MyWordCountJob");
- job.setJarByClass(MyWordCountJob.class);
-
- Path inPath = new Path("demos/pigdemo.txt");
- Path outPath = new Path("demos/pigdemoOut.txt");
-
- outPath.getFileSystem(getConf()).delete(outPath,true);
- TextInputFormat.setInputPaths(job, inPath);
- TextOutputFormat.setOutputPath(job, outPath);
-
-
- job.setMapperClass(MyWordCountJob.MyWordCountMapper.class);
- job.setReducerClass(MyWordCountJob.MyWordCountReducer.class);
- job.setInputFormatClass(TextInputFormat.class);
- job.setOutputFormatClass(TextOutputFormat.class);
-
- job.setMapOutputKeyClass(LongWritable.class);
- job.setMapOutputValueClass(Text.class);
- job.setOutputKeyClass(LongWritable.class);
- job.setOutputValueClass(Text.class);
- return job.waitForCompletion(true)?0:1;
- }
- public static void main(String [] args){
- int result = 0;
- try {
- result = ToolRunner.run(new Configuration(), new MyWordCountJob(), args);
- } catch (Exception e) {
- e.printStackTrace();
- }
- System.exit(result);
- }
-
- }
写完成编译,打包然后运行。
- 16/05/10 22:43:46 INFO mapreduce.Job: Running job: job_1462517728035_0033
- 16/05/10 22:43:54 INFO mapreduce.Job: Job job_1462517728035_0033 running in uber mode : false
- 16/05/10 22:43:54 INFO mapreduce.Job: map 0% reduce 0%
- 16/05/10 22:43:58 INFO mapreduce.Job: Task Id : attempt_1462517728035_0033_m_000000_0, Status : FAILED
- Error: java.lang.RuntimeException: java.lang.NoSuchMethodException: wordcount.MyWordCountJob$MyWordCountMapper.<init>()
- at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:131)
- at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:721)
- at org.apache.hadoop.mapred.MapTask.run(MapTask.java:339)
- at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:162)
- at java.security.AccessController.doPrivileged(Native Method)
- at javax.security.auth.Subject.doAs(Subject.java:396)
- at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1491)
- at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:157)
- Caused by: java.lang.NoSuchMethodException: wordcount.MyWordCountJob$MyWordCountMapper.<init>()
- at java.lang.Class.getConstructor0(Class.java:2706)
- at java.lang.Class.getDeclaredConstructor(Class.java:1985)
- at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:125)
- ... 7 more
-
- 16/05/10 22:44:02 INFO mapreduce.Job: Task Id : attempt_1462517728035_0033_m_000000_1, Status : FAILED
- Error: java.lang.RuntimeException: java.lang.NoSuchMethodException: wordcount.MyWordCountJob$MyWordCountMapper.<init>()
- at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:131)
- at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:721)
- at org.apache.hadoop.mapred.MapTask.run(MapTask.java:339)
- at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:162)
- at java.security.AccessController.doPrivileged(Native Method)
- at javax.security.auth.Subject.doAs(Subject.java:396)
- at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1491)
- at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:157)
- Caused by: java.lang.NoSuchMethodException: wordcount.MyWordCountJob$MyWordCountMapper.<init>()
- at java.lang.Class.getConstructor0(Class.java:2706)
- at java.lang.Class.getDeclaredConstructor(Class.java:1985)
- at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:125)
- ... 7 more
-
- 16/05/10 22:44:07 INFO mapreduce.Job: Task Id : attempt_1462517728035_0033_m_000000_2, Status : FAILED
- Error: java.lang.RuntimeException: java.lang.NoSuchMethodException: wordcount.MyWordCountJob$MyWordCountMapper.<init>()
- at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:131)
- at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:721)
- at org.apache.hadoop.mapred.MapTask.run(MapTask.java:339)
- at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:162)
- at java.security.AccessController.doPrivileged(Native Method)
- at javax.security.auth.Subject.doAs(Subject.java:396)
- at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1491)
- at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:157)
- Caused by: java.lang.NoSuchMethodException: wordcount.MyWordCountJob$MyWordCountMapper.<init>()
- at java.lang.Class.getConstructor0(Class.java:2706)
- at java.lang.Class.getDeclaredConstructor(Class.java:1985)
- at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:125)
- ... 7 more
-
- 16/05/10 22:44:14 INFO mapreduce.Job: map 100% reduce 100%
- 16/05/10 22:44:14 INFO mapreduce.Job: Job job_1462517728035_0033 failed with state FAILED due to: Task failed task_1462517728035_0033_m_000000
- Job failed as tasks failed. failedMaps:1 failedReduces:0
-
- 16/05/10 22:44:15 INFO mapreduce.Job: Counters: 6
- Job Counters
- Failed map tasks=4
- Launched map tasks=4
- Other local map tasks=3
- Data-local map tasks=1
- Total time spent by all maps in occupied slots (ms)=99584
- Total time spent by all reduces in occupied slots (ms)=0
上面的问题让我百思不得其解,完全不知道什么地方错了。
然后跟之前copy的代码进行比对。终于找出了问题所在!
注意Mapper 与 Reducer 类写成内部类,一定要加static !!!!
留个小任务,查看一下生成的结果文件可以发现什么?
使用TextInputFormat时,进入map 函数中的LongWritable类型的key 代表什么?
经实验确认这个key 其实是本行的首字符在整个文件中的偏移量。