MRUnit test methods fail with "HADOOP_HOME or hadoop.home.dir are not set"
I want to test Hadoop 2 MapReduce code with MRUnit, but I get an error. I do not want to install Hadoop on my local Windows machine. My pom.xml is below:
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-core</artifactId>
    <version>2.6.0-mr1-cdh5.4.2</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.6.0-cdh5.4.2</version>
</dependency>
<dependency>
    <groupId>org.apache.mrunit</groupId>
    <artifactId>mrunit</artifactId>
    <version>1.1.0</version>
    <classifier>hadoop2</classifier>
    <scope>test</scope>
</dependency>
<dependency>
    <groupId>org.mockito</groupId>
    <artifactId>mockito-all</artifactId>
    <version>1.9.5</version>
    <scope>test</scope>
</dependency>
The Mapper code is as follows:
package com.hadoop;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

/**
 * Created by zhu on 2015/6/3.
 */
public class SMSCDRMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    private Text status = new Text();
    private final static IntWritable addOne = new IntWritable(1);

    /**
     * Returns the SMS status code and its count
     */
    protected void map(LongWritable key, Text value, Context context)
            throws java.io.IOException, InterruptedException {
        // 655209;1;796764372490213;804422938115889;6 is the Sample record format
        String[] line = value.toString().split(";");
        // If record is of SMS CDR
        if (Integer.parseInt(line[1]) == 1) {
            status.set(line[4]);
            context.write(status, addOne);
        }
    }
}
The Reducer code is as follows:
package com.hadoop;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Created by zhu on 2015/6/3.
 */
public class SMSCDRReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws java.io.IOException, InterruptedException {
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();
        }
        context.write(key, new IntWritable(sum));
    }
}
The MRUnit test code is as follows:
package com.hadoop;

/**
 * Created by zhu on 2015/6/3.
 */
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mrunit.mapreduce.MapDriver;
import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;
import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
import org.junit.Before;
import org.junit.Test;

public class SMSCDRMapperReducerTest {

    MapDriver<LongWritable, Text, Text, IntWritable> mapDriver;
    ReduceDriver<Text, IntWritable, Text, IntWritable> reduceDriver;
    MapReduceDriver<LongWritable, Text, Text, IntWritable, Text, IntWritable> mapReduceDriver;
    Configuration conf = new Configuration();

    @Before
    public void setUp() {
        SMSCDRMapper mapper = new SMSCDRMapper();
        SMSCDRReducer reducer = new SMSCDRReducer();
        mapDriver = MapDriver.newMapDriver(mapper);
        reduceDriver = ReduceDriver.newReduceDriver(reducer);
        mapReduceDriver = MapReduceDriver.newMapReduceDriver(mapper, reducer);
        mapDriver.setConfiguration(conf);
        conf.set("myParameter1", "20");
        conf.set("myParameter2", "23");
    }

    @Test
    public void testMapper() {
        mapDriver.withInput(new LongWritable(), new Text(
                "655209;1;796764372490213;804422938115889;6"));
        mapDriver.withOutput(new Text("6"), new IntWritable(1));
        try {
            mapDriver.runTest();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Test
    public void testReducer() throws IOException {
        List<IntWritable> values = new ArrayList<IntWritable>();
        values.add(new IntWritable(1));
        values.add(new IntWritable(1));
        reduceDriver.withInput(new Text("6"), values);
        reduceDriver.withOutput(new Text("6"), new IntWritable(2));
        reduceDriver.runTest();
    }

    @Test
    public void testMapReduce() throws IOException {
        mapReduceDriver.withInput(new LongWritable(), new Text(
                "655209;1;796764372490213;804422938115889;6"));
        List<IntWritable> values = new ArrayList<IntWritable>();
        values.add(new IntWritable(1));
        values.add(new IntWritable(1));
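        // Note: the values list above is never used, and only one input record is supplied,
        // so the reduce phase emits (6, 1); expecting (6, 2) below is what produces the
        // "Missing expected output (6, 2) at position 0, got (6, 1)" assertion shown in the log.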
        mapReduceDriver.withOutput(new Text("6"), new IntWritable(2));
        mapReduceDriver.runTest();
    }
}
Running the test methods produces the errors below. Does using MRUnit require Hadoop to be installed on the local machine?
16:03:40,383 DEBUG Shell - Failed to detect a valid hadoop home directory
java.io.IOException: HADOOP_HOME or hadoop.home.dir are not set.
at org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:302)
at org.apache.hadoop.util.Shell.<clinit>(Shell.java:327)
at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:79)
at org.apache.hadoop.conf.Configuration.getStrings(Configuration.java:1767)
at org.apache.hadoop.io.serializer.SerializationFactory.<init>(SerializationFactory.java:58)
at org.apache.hadoop.mrunit.internal.io.Serialization.<init>(Serialization.java:39)
at org.apache.hadoop.mrunit.TestDriver.getSerialization(TestDriver.java:530)
at org.apache.hadoop.mrunit.TestDriver.copy(TestDriver.java:675)
at org.apache.hadoop.mrunit.TestDriver.copyPair(TestDriver.java:679)
at org.apache.hadoop.mrunit.MapReduceDriverBase.addInput(MapReduceDriverBase.java:66)
at org.apache.hadoop.mrunit.MapReduceDriverBase.withInput(MapReduceDriverBase.java:119)
at com.hadoop.SMSCDRMapperReducerTest.testMapReduce(SMSCDRMapperReducerTest.java:62)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at org.junit.runners.model.FrameworkMethod.runReflectiveCall(FrameworkMethod.java:45)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:42)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:263)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:68)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:47)
at org.junit.runners.ParentRunner.run(ParentRunner.java:231)
at org.junit.runners.ParentRunner.schedule(ParentRunner.java:60)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:229)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:50)
at org.junit.runners.ParentRunner.evaluate(ParentRunner.java:222)
at org.junit.runners.ParentRunner.run(ParentRunner.java:300)
at org.junit.runner.JUnitCore.run(JUnitCore.java:157)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:78)
at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:212)
at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:68)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
16:03:40,406 ERROR Shell - Failed to locate the winutils binary in the hadoop binary path
java.io.IOException: Could not locate executable null\bin\winutils.exe in the Hadoop binaries.
at org.apache.hadoop.util.Shell.getQualifiedBinPath(Shell.java:355)
at org.apache.hadoop.util.Shell.getWinUtilsPath(Shell.java:370)
at org.apache.hadoop.util.Shell.<clinit>(Shell.java:363)
at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:79)
at org.apache.hadoop.conf.Configuration.getStrings(Configuration.java:1767)
at org.apache.hadoop.io.serializer.SerializationFactory.<init>(SerializationFactory.java:58)
at org.apache.hadoop.mrunit.internal.io.Serialization.<init>(Serialization.java:39)
at org.apache.hadoop.mrunit.TestDriver.getSerialization(TestDriver.java:530)
at org.apache.hadoop.mrunit.TestDriver.copy(TestDriver.java:675)
at org.apache.hadoop.mrunit.TestDriver.copyPair(TestDriver.java:679)
at org.apache.hadoop.mrunit.MapReduceDriverBase.addInput(MapReduceDriverBase.java:66)
at org.apache.hadoop.mrunit.MapReduceDriverBase.withInput(MapReduceDriverBase.java:119)
at com.hadoop.SMSCDRMapperReducerTest.testMapReduce(SMSCDRMapperReducerTest.java:62)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at org.junit.runners.model.FrameworkMethod.runReflectiveCall(FrameworkMethod.java:45)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:42)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:263)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:68)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:47)
at org.junit.runners.ParentRunner.run(ParentRunner.java:231)
at org.junit.runners.ParentRunner.schedule(ParentRunner.java:60)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:229)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:50)
at org.junit.runners.ParentRunner.evaluate(ParentRunner.java:222)
at org.junit.runners.ParentRunner.run(ParentRunner.java:300)
at org.junit.runner.JUnitCore.run(JUnitCore.java:157)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:78)
at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:212)
at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:68)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
16:03:40,447 DEBUG MapReduceDriver - Starting map phase with mapper: com.hadoop.SMSCDRMapper@1a74d2a
16:03:40,970 DEBUG MapReduceDriver - Starting reduce phase with reducer: com.hadoop.SMSCDRReducer@1b18eb3
16:03:41,010 DEBUG ReducePhaseRunner - Reducing input ((6, 1))
16:03:41,103 ERROR TestDriver - Missing expected output (6, 2) at position 0, got (6, 1).
java.lang.AssertionError: 1 Error(s): (Missing expected output (6, 2) at position 0, got (6, 1).)
at org.junit.Assert.fail(Assert.java:93)
at org.apache.hadoop.mrunit.internal.util.Errors.assertNone(Errors.java:73)
at org.apache.hadoop.mrunit.TestDriver.validate(TestDriver.java:768)
at org.apache.hadoop.mrunit.TestDriver.runTest(TestDriver.java:641)
at org.apache.hadoop.mrunit.TestDriver.runTest(TestDriver.java:627)
at com.hadoop.SMSCDRMapperReducerTest.testMapReduce(SMSCDRMapperReducerTest.java:68)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at org.junit.runners.model.FrameworkMethod.runReflectiveCall(FrameworkMethod.java:45)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:42)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:263)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:68)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:47)
at org.junit.runners.ParentRunner.run(ParentRunner.java:231)
at org.junit.runners.ParentRunner.schedule(ParentRunner.java:60)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:229)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:50)
at org.junit.runners.ParentRunner.evaluate(ParentRunner.java:222)
at org.junit.runners.ParentRunner.run(ParentRunner.java:300)
at org.junit.runner.JUnitCore.run(JUnitCore.java:157)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:78)
at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:212)
at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:68)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
How can I solve this problem?
I had a similar problem and solved it by replacing the "hadoop-common" dependency with "hadoop-yarn-common". I hope this helps you too.
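For reference, a minimal sketch of that substitution in the pom.xml; the hadoop-yarn-common version here is an assumption based on the CDH 5.4.2 artifacts in the question, so match it to your own distribution:
<!-- Sketch: swap hadoop-common for hadoop-yarn-common (version assumed; align with your CDH release) -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-yarn-common</artifactId>
    <version>2.6.0-cdh5.4.2</version>
</dependency>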
Also, I am skeptical of the "hadoop-core" dependency at version "2.6.0-mr1-cdh5.4.2" mentioned in your question; I believe that artifact is for MapReduce 1 (not MapReduce 2) compatibility.
Finally, I still hit an error when using MRUnit against Hadoop 2, whereas using it against Hadoop 1 was fine. Below is the changed Maven configuration:
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-client</artifactId>
    <version>2.0.0-mr1-cdh4.4.0</version>
    <scope>provided</scope>
    <exclusions>
        <exclusion>
            <artifactId>mockito-all</artifactId>
            <groupId>org.mockito</groupId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-mapreduce-client-core</artifactId>
    <version>2.0.0-cdh4.4.0</version>
</dependency>
<dependency>
    <groupId>org.apache.mrunit</groupId>
    <artifactId>mrunit</artifactId>
    <version>1.1.0</version>
    <classifier>hadoop2</classifier>
    <!--<scope>test</scope>-->
</dependency>
<dependency>
    <groupId>org.mockito</groupId>
    <artifactId>mockito-all</artifactId>
    <version>1.9.5</version>
    <scope>test</scope>
</dependency>