java - In a MapReduce job, how to send an ArrayList as a value from the mapper to the reducer -



How can I pass an ArrayList as a value from the mapper to the reducer?

My code has rules to work with and creates new values (Strings) based on those rules. I am maintaining the outputs (generated after rule execution) in a List, and I need to send that output (as the mapper's value) to the reducer, but I do not have a way to do so.

Can someone please point me in the right direction?

Adding my code:

package develop;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;

import utility.RulesExtractionUtility;

public class CustomMap {

    public static class CustomerMapper extends Mapper<Object, Text, Text, Text> {
        private Map<String, String> rules;

        @Override
        public void setup(Context context) {
            try {
                URI[] cacheFiles = context.getCacheFiles();
                setupRulesMap(cacheFiles[0].toString());
            } catch (IOException ioe) {
                System.err.println("Error reading state file.");
                System.exit(1);
            }
        }

        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {

//          Map<String, String> rules = new LinkedHashMap<String, String>();
//          rules.put("targetColumn[1]", "ASSIGN(source[0])");
//          rules.put("targetColumn[2]", "INCOME(source[2]+source[3])");
//          rules.put("targetColumn[3]", "ASSIGN(source[1])");

//          The "rules" above create a list of values from the source file.

            String[] splitSource = value.toString().split(" ");

            List<String> lists = RulesExtractionUtility.rulesEngineExecutor(splitSource, rules);

//          lists has the values (name, age) for each line of a huge text file; I want to write
//          them to the context and pass them to the reducer. As of now I haven't implemented
//          the reducer code; I am stuck passing the value from the mapper.

//          context.write(new Text(), lists); ---- I do not have a way of doing this
        }

        private void setupRulesMap(String filename) throws IOException {
            Map<String, String> rule = new LinkedHashMap<String, String>();
            BufferedReader reader = new BufferedReader(new FileReader(filename));
            String line = reader.readLine();
            while (line != null) {
                String[] split = line.split("=");
                rule.put(split[0], split[1]);
                line = reader.readLine();

                // rules logic
            }
            rules = rule;
        }
    }

    public static void main(String[] args) throws IllegalArgumentException, IOException, ClassNotFoundException, InterruptedException, URISyntaxException {

        Configuration conf = new Configuration();
        if (args.length != 2) {
            System.err.println("Usage: CustomerMapper <in> <out>");
            System.exit(2);
        }
        Job job = Job.getInstance(conf);
        job.setJarByClass(CustomMap.class);
        job.setMapperClass(CustomerMapper.class);
        job.addCacheFile(new URI("some hdfs location"));

        URI[] cacheFiles = job.getCacheFiles();
        if (cacheFiles != null) {
            for (URI cacheFile : cacheFiles) {
                System.out.println("Cache file ->" + cacheFile);
            }
        }
        // job.setReducerClass(Reducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
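For reference, the sticking point is that Hadoop map output values must be Writable types, and java.util.List is not one. A minimal workaround sketch, independent of the answer below and assuming the generated values are plain strings that never contain the delimiter: fold the list into one delimited Text (String.join needs Java 8) and split it again in the reducer. The tab delimiter and the use of splitSource[0] as the key are assumptions.

    public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
        String[] splitSource = value.toString().split(" ");
        List<String> lists = RulesExtractionUtility.rulesEngineExecutor(splitSource, rules);

        // Fold the whole list into one Text; the delimiter must never occur
        // inside the generated values, otherwise the reducer's split() breaks.
        context.write(new Text(splitSource[0]), new Text(String.join("\t", lists)));
    }

The reducer side then recovers the list with value.toString().split("\t").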

To pass an ArrayList from the mapper to the reducer, it's clear that the objects must implement the Writable interface. Why don't you try this library?

<dependency>
    <groupId>org.apache.giraph</groupId>
    <artifactId>giraph-core</artifactId>
    <version>1.1.0-hadoop2</version>
</dependency>

It has an abstract class:

public abstract class ArrayListWritable<M extends org.apache.hadoop.io.Writable>
        extends ArrayList<M>
        implements org.apache.hadoop.io.Writable, org.apache.hadoop.conf.Configurable
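For intuition, here is roughly what such a class does. This is an illustrative sketch of the standard Writable pattern, not Giraph's verbatim source; refClass stands for the element class that the concrete subclass supplies:

    public void write(DataOutput out) throws IOException {
        out.writeInt(size());              // element count first
        for (M value : this) {
            value.write(out);              // then each element serializes itself
        }
    }

    public void readFields(DataInput in) throws IOException {
        clear();
        int numValues = in.readInt();
        for (int i = 0; i < numValues; i++) {
            // Deserialization must instantiate each element, which is why the
            // concrete element class has to be known up front.
            M value = ReflectionUtils.newInstance(refClass, getConf());
            value.readFields(in);
            add(value);
        }
    }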

You can create your own class from that source code by filling in the abstract methods and implementing the interface methods. For instance:

public class MyListWritable extends ArrayListWritable<Text> {
    ...
}
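A slightly fuller sketch of the subclass and the job wiring, assuming the Giraph 1.1.0 API in which the subclass supplies the element class through setClass(); check ArrayListWritable's source for whether the no-arg constructor needs to call it itself:

    import org.apache.giraph.utils.ArrayListWritable;
    import org.apache.hadoop.io.Text;

    public class MyListWritable extends ArrayListWritable<Text> {

        public MyListWritable() {
            super();
            setClass();  // assumption: make the element class known before readFields()
        }

        @Override
        public void setClass() {
            setClass(Text.class);
        }
    }

Since the map output value type now differs from the job's final output value type, it has to be declared explicitly in the driver, and the mapper writes the list directly:

    // In main():
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(MyListWritable.class);

    // In map():
    MyListWritable out = new MyListWritable();
    for (String s : lists) {
        out.add(new Text(s));
    }
    context.write(new Text(splitSource[0]), out);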

