Reducer 不工作或从未接到电话
Reducer Not Working or Never geting call
Driver class:
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
public class DRIVER {
public static void main(String arg[])
{
try{
Path in = new Path("aamazon.txt");
Path out = new Path("/output");
Configuration conf = new Configuration();
Job job = Job.getInstance(conf);
job.setJarByClass(DRIVER.class);
job.setMapperClass(MAPPER.class);
job.setReducerClass(REDUCER.class);
job.setNumReduceTasks(0);
FileInputFormat.addInputPath(job, in);
FileOutputFormat.setOutputPath(job, out);
job.waitForCompletion(true);
System.out.println("Successful");}
catch(Exception e){
System.out.println(e.getMessage());
}
}
}
映射器Class:
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
public class MAPPER extends Mapper<LongWritable,Text,LongWritable,Text>{
@Override
public void map(LongWritable key,Text value,Context con)
{
try
{
System.out.println(key +"\n"+ value);
con.write(key, value);
}
catch(Exception e)
{
System.out.println(e.getMessage());
}
}
}
减速机Class:
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
public class REDUCER extends Reducer<LongWritable,Text,LongWritable,Text>{
@Override
public void reduce(LongWritable key,Iterable<Text> value , Context con)
{
System.out.println("reducer");
try{
for(Text t:value)
{
con.write(key, t);
}
}
catch (Exception e)
{
System.out.println(e.getMessage());
}
}
}
问题:
- 执行直到 Mapper
- Reducer 永远不会被调用
- 如果我设置
setNumReduceTasks(0)
则不会调用 Mapper
知道哪里出了问题吗?
您已将reduce 任务数设置为零。
Job job = Job.getInstance(conf);
job.setJarByClass(DRIVER.class);
job.setMapperClass(MAPPER.class);
job.setReducerClass(REDUCER.class);
job.setNumReduceTasks(0); // this should be greater than 0
即使在那之后,如果它不起作用,请检查您是否对以下行中的“/output”路径具有权限 -
Path out = new Path("/output"); // it is in the root folder. change it to "./output"
Driver class:
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
public class DRIVER {
public static void main(String arg[])
{
try{
Path in = new Path("aamazon.txt");
Path out = new Path("/output");
Configuration conf = new Configuration();
Job job = Job.getInstance(conf);
job.setJarByClass(DRIVER.class);
job.setMapperClass(MAPPER.class);
job.setReducerClass(REDUCER.class);
job.setNumReduceTasks(0);
FileInputFormat.addInputPath(job, in);
FileOutputFormat.setOutputPath(job, out);
job.waitForCompletion(true);
System.out.println("Successful");}
catch(Exception e){
System.out.println(e.getMessage());
}
}
}
映射器Class:
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
public class MAPPER extends Mapper<LongWritable,Text,LongWritable,Text>{
@Override
public void map(LongWritable key,Text value,Context con)
{
try
{
System.out.println(key +"\n"+ value);
con.write(key, value);
}
catch(Exception e)
{
System.out.println(e.getMessage());
}
}
}
减速机Class:
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
public class REDUCER extends Reducer<LongWritable,Text,LongWritable,Text>{
@Override
public void reduce(LongWritable key,Iterable<Text> value , Context con)
{
System.out.println("reducer");
try{
for(Text t:value)
{
con.write(key, t);
}
}
catch (Exception e)
{
System.out.println(e.getMessage());
}
}
}
问题:
- 执行直到 Mapper
- Reducer 永远不会被调用
- 如果我设置
setNumReduceTasks(0)
则不会调用 Mapper
知道哪里出了问题吗?
您已将reduce 任务数设置为零。
Job job = Job.getInstance(conf);
job.setJarByClass(DRIVER.class);
job.setMapperClass(MAPPER.class);
job.setReducerClass(REDUCER.class);
job.setNumReduceTasks(0); // this should be greater than 0
即使在那之后,如果它不起作用,请检查您是否对以下行中的“/output”路径具有权限 -
Path out = new Path("/output"); // it is in the root folder. change it to "./output"