Java source code examples: org.apache.hadoop.mrunit.mapreduce.MapReduceDriver

Example 1
@Test
public void testFullTableSize() throws IOException {

    Value value = new Value(new byte[0]);

    // Expected result: a single Mutation recording a full-table cardinality of 15.
    Mutation m = new Mutation(new Text("subjectpredicateobject" + DELIM + "FullTableCardinality"));
    m.put(new Text("FullTableCardinality"), new Text("15"), new Value(new byte[0]));

    // Feed 15 entries through the mapper, combiner, and reducer in a single pass.
    new MapReduceDriver<Key, Value, Text, IntWritable, Text, Mutation>()
            .withMapper(new FullTableSize.FullTableMapper()).withInput(new Key(new Text("entry1")), value)
            .withInput(new Key(new Text("entry2")), value).withInput(new Key(new Text("entry3")), value)
            .withInput(new Key(new Text("entry4")), value).withInput(new Key(new Text("entry5")), value)
            .withInput(new Key(new Text("entry6")), value).withInput(new Key(new Text("entry7")), value)
            .withInput(new Key(new Text("entry8")), value).withInput(new Key(new Text("entry9")), value)
            .withInput(new Key(new Text("entry10")), value).withInput(new Key(new Text("entry11")), value)
            .withInput(new Key(new Text("entry12")), value).withInput(new Key(new Text("entry13")), value)
            .withInput(new Key(new Text("entry14")), value).withInput(new Key(new Text("entry15")), value)
            .withCombiner(new FullTableSize.FullTableCombiner()).withReducer(new FullTableSize.FullTableReducer())
            .withOutput(new Text(""), m).runTest();

}
 
Example 2
public static void verifyMapReduce(SmartMapper mapper, SmartReducer reducer, Object key, Object input)
    throws Exception
{
  // Run the mapper on its own to capture the intermediate key/value pairs.
  MapDriver mapDriver = new MapDriver();
  mapDriver.setMapper(mapper);
  MapReduceDriver mapReduceDriver = new MapReduceDriver();
  mapReduceDriver.setMapper(mapper);
  Object writableKey = WritableUtils.createWritable(key, mapper.getKeyInType());
  Object writableValue = WritableUtils.createWritable(input, mapper.getValueInType());
  mapDriver.withInput(writableKey, writableValue);
  List results = mapDriver.run();
  Collections.sort(results, PairComparer.INSTANCE);
  // Run the full map -> shuffle -> reduce pipeline on a freshly typed driver
  // (the raw-typed driver created above is replaced here).
  mapReduceDriver = new MapReduceDriver<LongWritable, Text, Text, LongWritable, Text, LongWritable>();
  writableKey = WritableUtils.createWritable(key, mapper.getKeyInType());
  writableValue = WritableUtils.createWritable(input, mapper.getValueInType());
  mapReduceDriver.withInput(writableKey, writableValue);
  mapReduceDriver.setMapper(mapper);
  mapReduceDriver.setReducer(reducer);
  List finalResults = mapReduceDriver.run();
  // Render the mapper-only and end-to-end results into one string and verify it
  // against the previously approved snapshot.
  String text = String.format("[%s]\n\n -> maps via %s to -> \n\n%s\n\n -> reduces via %s to -> \n\n%s", input,
      mapper.getClass().getSimpleName(), ArrayUtils.toString(results, Echo.INSTANCE),
      reducer.getClass().getSimpleName(), ArrayUtils.toString(finalResults, Echo.INSTANCE));
  Approvals.verify(text);
}
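
This helper pairs MRUnit with ApprovalTests: it runs the mapper alone to capture the intermediate pairs, then runs the complete map-shuffle-reduce pipeline, and formats both result lists into one string that Approvals.verify compares against a previously approved snapshot. A hypothetical call site might look like the line below; WordCountMapper and WordCountReducer are placeholder names, not classes from the original source.

    verifyMapReduce(new WordCountMapper(), new WordCountReducer(), 0L, "cat cat dog");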
 
Example 3
@Before
public void setUp() throws Exception {
    createTestMetadata();

    // hack for distributed cache
    FileUtils.deleteDirectory(new File("./meta"));
    FileUtils.copyDirectory(new File(getTestConfig().getMetadataUrl().toString()), new File("./meta"));

    NDCuboidMapper mapper = new NDCuboidMapper();
    CuboidReducer reducer = new CuboidReducer();
    mapReduceDriver = MapReduceDriver.newMapReduceDriver(mapper, reducer);
}
 
Example 4
@Before
public void setUp()
{
    SMSCDRMapper mapper = new SMSCDRMapper();
    SMSCDRReducer reducer = new SMSCDRReducer();
    mapDriver = MapDriver.newMapDriver(mapper);
    reduceDriver = ReduceDriver.newReduceDriver(reducer);
    mapReduceDriver = MapReduceDriver.newMapReduceDriver(mapper, reducer);
}
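
The setUp above only wires the drivers together; a typical test method then feeds key/value pairs into mapReduceDriver and asserts on the shuffled, reduced output. Below is a minimal sketch, assuming the common tutorial behaviour in which SMSCDRMapper emits the CDR status code with an IntWritable count of 1 and SMSCDRReducer sums those counts, and assuming the driver is typed as MapReduceDriver<LongWritable, Text, Text, IntWritable, Text, IntWritable>; the record layout and expected output are illustrative assumptions.

@Test
public void testMapReduce() throws IOException {
    // Hypothetical CDR record (fields separated by ';', last field assumed to be the status code).
    mapReduceDriver
            .withInput(new LongWritable(1), new Text("655209;1;796764372490213;804422938115889;6"))
            .withOutput(new Text("6"), new IntWritable(1))
            .runTest();
}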
 
Example 5
@Before
public void setUp() throws Exception {
    createTestMetadata();

    // hack for distributed cache
    FileUtils.deleteDirectory(new File("../job/meta"));
    FileUtils.copyDirectory(new File(getTestConfig().getMetadataUrl()), new File("../job/meta"));

    NDCuboidMapper mapper = new NDCuboidMapper();
    CuboidReducer reducer = new CuboidReducer();
    mapReduceDriver = MapReduceDriver.newMapReduceDriver(mapper, reducer);
}
 
Example 6
@Before
public void setUp() {
  mapper = new Mapper<Text, Text, Text, Text>();
  reducer = new Reducer<Text, Text, Text, Text>();
  driver = new MapReduceDriver<Text, Text, Text, Text, Text, Text>(mapper, reducer);
}
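
Since the default Mapper and Reducer simply pass records through, a test against this driver usually just checks that input pairs survive the map and reduce phases unchanged. A minimal sketch, with an illustrative test name and sample data:

@Test
public void testIdentityPassThrough() throws IOException {
    // The identity mapper emits (k, v) as-is; the identity reducer re-emits each value per key.
    driver.withInput(new Text("k"), new Text("v"))
          .withOutput(new Text("k"), new Text("v"))
          .runTest();
}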