mapreduce包错误

qoefvg9y  于 2021-05-30  发布在  Hadoop
关注(0)|答案(1)|浏览(445)

//这3个项目需要什么软件包???如何将这3个程序组合成一个程序??如何在eclipse中使用这个3程序来做mapreduce??
请帮助我成功运行这个程序
操作系统:linux
面临的例外情况:
1. The method TryParseInt(String) is undefined for the type MaxPubYear.MaxPubYearReducer（TryParseInt 方法未定义）
2. The method setInputFormatClass(Class&lt;? extends InputFormat&gt;) in the type Job is not applicable for the arguments (Class&lt;TextInputFormat&gt;)（Job 的 setInputFormatClass 不适用于该参数——新旧 API 混用所致）
Map程序代码:

  1. public static class MaxPubYearMapper extends Mapper<LongWritable , Text, IntWritable,Text>
  2. {
  3. public void map(LongWritable key, Text value , Context context)
  4. throws IOException, InterruptedException
  5. {
  6. String delim = "\t";
  7. Text valtosend = new Text();
  8. String tokens[] = value.toString().split(delim);
  9. if (tokens.length == 2)
  10. {
  11. valtosend.set(tokens[0] + ";"+ tokens[1]);
  12. context.write(new IntWritable(1), valtosend);
  13. }
  14. }
  15. }

减速机代码:

  1. public static class MaxPubYearReducer extends Reducer<IntWritable ,Text, Text, IntWritable>
  2. {
  3. public void reduce(IntWritable key, Iterable<Text> values , Context context) throws IOException, InterruptedException
  4. {
  5. int maxiValue = Integer.MIN_VALUE;
  6. String maxiYear = "";
  7. for(Text value:values) {
  8. String token[] = value.toString().split(";");
  9. if(token.length == 2 && TryParseInt(token[1]).intValue()> maxiValue)
  10. {
  11. maxiValue = TryParseInt(token[1]);
  12. maxiYear = token[0];
  13. }
  14. }
  15. context.write(new Text(maxiYear), new IntWritable(maxiValue));
  16. }
  17. }

驱动程序代码:

  1. public static void main(String[] args) throws Exception {
  2. Configuration conf = new Configuration();
  3. Job job = new Job(conf , "Frequency`enter code here`");
  4. job.setJarByClass(MaxPubYear.class);
  5. job.setOutputKeyClass(Text.class);
  6. job.setOutputValueClass(IntWritable.class);
  7. job.setMapperClass(FrequencyMapper.class);
  8. job.setCombinerClass(FrequencyReducer.class);
  9. job.setReducerClass(FrequencyReducer.class);
  10. job.setOutputFormatClass(TextOutputFormat.class);
  11. job.setInputFormatClass(TextInputFormat.class);
  12. FileInputFormat.addInputPath(job,new Path(args[0]));
  13. FileOutputFormat.setOutputPath(job,new Path(args[1]+ "_temp"));
  14. int exitCode = job.waitForCompletion(true)?0:1;
  15. if (exitCode == 0 )
  16. {
  17. Job SecondJob = new Job(conf, "Maximum Publication year");
  18. SecondJob.setJarByClass(MaxPubYear.class);
  19. SecondJob.setOutputKeyClass(Text.class);
  20. SecondJob.setOutputValueClass(IntWritable.class);
  21. SecondJob.setMapOutputKeyClass(IntWritable.class);
  22. SecondJob.setMapOutputValueClass(Text.class);
  23. SecondJob.setMapperClass(MaxPubYearMapper.class);
  24. SecondJob.setReducerClass(MaxPubYearReducer.class);
  25. FileInputFormat.addInputPath(SecondJob,new Path(args[1]+ "_temp"));
  26. FileOutputFormat.setOutputPath(SecondJob,new Path(args[1]));
  27. System.exit(SecondJob.waitForCompletion(true)?0:1);
  28. }
  29. }
fafcakar

fafcakar1#

把它们全部写进同一个类（class）里。
所需的软件包包括:

  1. package org.myorg;
  2. import java.io.IOException;
  3. import java.util.*;
  4. import org.apache.hadoop.fs.Path;
  5. import org.apache.hadoop.conf.*;
  6. import org.apache.hadoop.io.*;
  7. import org.apache.hadoop.mapred.*;
  8. import org.apache.hadoop.util.*;
  9. import java.io.DataInput;
  10. import java.io.DataOutput;

这里可能有一些额外的东西,因为我从代码中复制了它们。

  1. package org.myorg;
  2. import java.util.*;
  3. import org.apache.hadoop.fs.Path;
  4. import org.apache.hadoop.conf.*;
  5. import org.apache.hadoop.io.*;
  6. import org.apache.hadoop.mapred.*;
  7. import org.apache.hadoop.util.*;
  8. import java.text.SimpleDateFormat;
  9. import java.util.Date;
  10. import java.io.DataInput;
  11. import java.io.DataOutput;
  12. import java.io.IOException;
  13. public class <your classname as well as filename> {
  14. public static class MaxPubYearMapper extends Mapper<LongWritable , Text, IntWritable,Text>
  15. {
  16. public void map(LongWritable key, Text value , Context context)
  17. throws IOException, InterruptedException
  18. {
  19. String delim = "\t";
  20. Text valtosend = new Text();
  21. String tokens[] = value.toString().split(delim);
  22. if (tokens.length == 2)
  23. {
  24. valtosend.set(tokens[0] + ";"+ tokens[1]);
  25. context.write(new IntWritable(1), valtosend);
  26. }
  27. }
  28. }
  29. public static class MaxPubYearReducer extends Reducer<IntWritable ,Text, Text, IntWritable>
  30. {
  31. public void reduce(IntWritable key, Iterable<Text> values , Context context) throws IOException, InterruptedException
  32. {
  33. int maxiValue = Integer.MIN_VALUE;
  34. String maxiYear = "";
  35. for(Text value:values) {
  36. String token[] = value.toString().split(";");
  37. if(token.length == 2 && TryParseInt(token[1]).intValue()> maxiValue)
  38. {
  39. maxiValue = TryParseInt(token[1]);
  40. maxiYear = token[0];
  41. }
  42. }
  43. context.write(new Text(maxiYear), new IntWritable(maxiValue));
  44. }
  45. }
  46. public static void main(String[] args) throws Exception {
  47. Configuration conf = new Configuration();
  48. Job job = new Job(conf , "Frequency`enter code here`");
  49. job.setJarByClass(MaxPubYear.class);
  50. job.setOutputKeyClass(Text.class);
  51. job.setOutputValueClass(IntWritable.class);
  52. job.setMapperClass(FrequencyMapper.class);
  53. job.setCombinerClass(FrequencyReducer.class);
  54. job.setReducerClass(FrequencyReducer.class);
  55. job.setOutputFormatClass(TextOutputFormat.class);
  56. job.setInputFormatClass(TextInputFormat.class);
  57. FileInputFormat.addInputPath(job,new Path(args[0]));
  58. FileOutputFormat.setOutputPath(job,new Path(args[1]+ "_temp"));
  59. int exitCode = job.waitForCompletion(true)?0:1;
  60. if (exitCode == 0 )
  61. {
  62. Job SecondJob = new Job(conf, "Maximum Publication year");
  63. SecondJob.setJarByClass(MaxPubYear.class);
  64. SecondJob.setOutputKeyClass(Text.class);
  65. SecondJob.setOutputValueClass(IntWritable.class);
  66. SecondJob.setMapOutputKeyClass(IntWritable.class);
  67. SecondJob.setMapOutputValueClass(Text.class);
  68. SecondJob.setMapperClass(MaxPubYearMapper.class);
  69. SecondJob.setReducerClass(MaxPubYearReducer.class);
  70. FileInputFormat.addInputPath(SecondJob,new Path(args[1]+ "_temp"));
  71. FileOutputFormat.setOutputPath(SecondJob,new Path(args[1]));
  72. System.exit(SecondJob.waitForCompletion(true)?0:1);
  73. }
  74. }
  75. }
展开查看全部

相关问题