Merge branch 'fullstack_genomix' of https://code.google.com/p/hyracks into fullstack_genomix

Conflicts:
	genomix/genomix-data/src/main/java/edu/uci/ics/genomix/type/KmerBytesWritable.java
diff --git a/genomix/genomix-data/src/main/java/edu/uci/ics/genomix/type/KmerBytesWritable.java b/genomix/genomix-data/src/main/java/edu/uci/ics/genomix/type/KmerBytesWritable.java
index c211ce7..1f790eb 100644
--- a/genomix/genomix-data/src/main/java/edu/uci/ics/genomix/type/KmerBytesWritable.java
+++ b/genomix/genomix-data/src/main/java/edu/uci/ics/genomix/type/KmerBytesWritable.java
@@ -183,7 +183,11 @@
         int pos = ((kmerlength - 1) % 4) << 1;
         byte code = (byte) (c << pos);
         bytes[0] = (byte) (((bytes[0] >>> 2) & 0x3f) | code);
+<<<<<<< HEAD
+        return (byte) output;
+=======
         return output;
+>>>>>>> 230f23a445dc7904a45bc535ebd73c3f4c619081
     }
 
     /**
@@ -216,7 +220,11 @@
             bytes[0] &= (1 << ((kmerlength % 4) << 1)) - 1;
         }
         bytes[size - 1] = (byte) ((bytes[size - 1] << 2) | c);
+<<<<<<< HEAD
+        return (byte) output;
+=======
         return output;
+>>>>>>> 230f23a445dc7904a45bc535ebd73c3f4c619081
     }
 
     public void set(KmerBytesWritable newData) {
diff --git a/genomix/genomix-hadoop/actual1/conf.xml b/genomix/genomix-hadoop/actual1/conf.xml
index 506913d..ea51bd0 100644
--- a/genomix/genomix-hadoop/actual1/conf.xml
+++ b/genomix/genomix-hadoop/actual1/conf.xml
@@ -12,7 +12,7 @@
 <property><name>dfs.namenode.logging.level</name><value>info</value></property>
 <property><name>dfs.datanode.address</name><value>127.0.0.1:0</value></property>
 <property><name>io.skip.checksum.errors</name><value>false</value></property>
-<property><name>fs.default.name</name><value>hdfs://localhost:61115</value></property>
+<property><name>fs.default.name</name><value>hdfs://localhost:54837</value></property>
 <property><name>mapred.child.tmp</name><value>./tmp</value></property>
 <property><name>fs.har.impl.disable.cache</name><value>true</value></property>
 <property><name>dfs.safemode.threshold.pct</name><value>0.999f</value></property>
@@ -125,7 +125,7 @@
 <property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
 <property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
 <property><name>dfs.namenode.decommission.interval</name><value>3</value></property>
-<property><name>dfs.http.address</name><value>localhost:61116</value></property>
+<property><name>dfs.http.address</name><value>localhost:54838</value></property>
 <property><name>dfs.heartbeat.interval</name><value>3</value></property>
 <property><name>mapred.job.tracker</name><value>local</value></property>
 <property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
diff --git a/genomix/genomix-hadoop/actual1/result1/.part-00000.crc b/genomix/genomix-hadoop/actual1/result1/.part-00000.crc
index 3422e04..93d0276 100644
--- a/genomix/genomix-hadoop/actual1/result1/.part-00000.crc
+++ b/genomix/genomix-hadoop/actual1/result1/.part-00000.crc
Binary files differ
diff --git a/genomix/genomix-hadoop/actual1/result1/part-00000 b/genomix/genomix-hadoop/actual1/result1/part-00000
index c21f5f6..57a1fd6 100755
--- a/genomix/genomix-hadoop/actual1/result1/part-00000
+++ b/genomix/genomix-hadoop/actual1/result1/part-00000
Binary files differ
diff --git a/genomix/genomix-hadoop/actual2/conf.xml b/genomix/genomix-hadoop/actual2/conf.xml
index ff11b9e..e8ae529 100644
--- a/genomix/genomix-hadoop/actual2/conf.xml
+++ b/genomix/genomix-hadoop/actual2/conf.xml
@@ -12,7 +12,7 @@
 <property><name>dfs.namenode.logging.level</name><value>info</value></property>
 <property><name>dfs.datanode.address</name><value>127.0.0.1:0</value></property>
 <property><name>io.skip.checksum.errors</name><value>false</value></property>
-<property><name>fs.default.name</name><value>hdfs://localhost:61195</value></property>
+<property><name>fs.default.name</name><value>hdfs://localhost:54877</value></property>
 <property><name>mapred.child.tmp</name><value>./tmp</value></property>
 <property><name>fs.har.impl.disable.cache</name><value>true</value></property>
 <property><name>dfs.safemode.threshold.pct</name><value>0.999f</value></property>
@@ -125,7 +125,7 @@
 <property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
 <property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
 <property><name>dfs.namenode.decommission.interval</name><value>3</value></property>
-<property><name>dfs.http.address</name><value>localhost:61196</value></property>
+<property><name>dfs.http.address</name><value>localhost:54878</value></property>
 <property><name>dfs.heartbeat.interval</name><value>3</value></property>
 <property><name>mapred.job.tracker</name><value>local</value></property>
 <property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
diff --git a/genomix/genomix-hadoop/actual2/result2/.part-00000.crc b/genomix/genomix-hadoop/actual2/result2/.part-00000.crc
index 3f8c2c5..9019bf8 100644
--- a/genomix/genomix-hadoop/actual2/result2/.part-00000.crc
+++ b/genomix/genomix-hadoop/actual2/result2/.part-00000.crc
Binary files differ
diff --git a/genomix/genomix-hadoop/actual2/result2/part-00000 b/genomix/genomix-hadoop/actual2/result2/part-00000
index ea3e875..c72ced3 100755
--- a/genomix/genomix-hadoop/actual2/result2/part-00000
+++ b/genomix/genomix-hadoop/actual2/result2/part-00000
Binary files differ
diff --git a/genomix/genomix-hadoop/actual3/complete2/.complete2-r-00000.crc b/genomix/genomix-hadoop/actual3/complete2/.complete2-r-00000.crc
deleted file mode 100644
index b0b2753..0000000
--- a/genomix/genomix-hadoop/actual3/complete2/.complete2-r-00000.crc
+++ /dev/null
Binary files differ
diff --git a/genomix/genomix-hadoop/actual3/complete2/complete2-r-00000 b/genomix/genomix-hadoop/actual3/complete2/complete2-r-00000
deleted file mode 100755
index d3d3667..0000000
--- a/genomix/genomix-hadoop/actual3/complete2/complete2-r-00000
+++ /dev/null
Binary files differ
diff --git a/genomix/genomix-hadoop/actual3/conf.xml b/genomix/genomix-hadoop/actual3/conf.xml
index 16a0edc..9c045a4 100644
--- a/genomix/genomix-hadoop/actual3/conf.xml
+++ b/genomix/genomix-hadoop/actual3/conf.xml
@@ -12,7 +12,7 @@
 <property><name>dfs.namenode.logging.level</name><value>info</value></property>
 <property><name>dfs.datanode.address</name><value>127.0.0.1:0</value></property>
 <property><name>io.skip.checksum.errors</name><value>false</value></property>
-<property><name>fs.default.name</name><value>hdfs://localhost:62106</value></property>
+<property><name>fs.default.name</name><value>hdfs://localhost:55383</value></property>
 <property><name>mapred.child.tmp</name><value>./tmp</value></property>
 <property><name>fs.har.impl.disable.cache</name><value>true</value></property>
 <property><name>dfs.safemode.threshold.pct</name><value>0.999f</value></property>
@@ -125,7 +125,7 @@
 <property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
 <property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
 <property><name>dfs.namenode.decommission.interval</name><value>3</value></property>
-<property><name>dfs.http.address</name><value>localhost:62107</value></property>
+<property><name>dfs.http.address</name><value>localhost:55384</value></property>
 <property><name>dfs.heartbeat.interval</name><value>3</value></property>
 <property><name>mapred.job.tracker</name><value>local</value></property>
 <property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
diff --git a/genomix/genomix-hadoop/compare/result3/comparesource.txt b/genomix/genomix-hadoop/compare/result3/comparesource.txt
index 5f9dd78..ea6c574 100644
--- a/genomix/genomix-hadoop/compare/result3/comparesource.txt
+++ b/genomix/genomix-hadoop/compare/result3/comparesource.txt
@@ -1 +1,2 @@
-02 71	66	1
+CATC	68	1
+TCG	18	0
diff --git a/genomix/genomix-hadoop/data/webmap/text.txt b/genomix/genomix-hadoop/data/webmap/text.txt
deleted file mode 100755
index c6cd7fe..0000000
--- a/genomix/genomix-hadoop/data/webmap/text.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-@625E1AAXX100810:1:100:10000:10271/1

-AATAGAAGATCGAT

-+

-EDBDB?BEEEDGGEGGGDGGGA>DG@GGD;GD@DG@F?<B<BFFD?

diff --git a/genomix/genomix-hadoop/pom.xml b/genomix/genomix-hadoop/pom.xml
index f791f5b..ff6a7ee 100755
--- a/genomix/genomix-hadoop/pom.xml
+++ b/genomix/genomix-hadoop/pom.xml
@@ -22,8 +22,9 @@
 				<artifactId>maven-compiler-plugin</artifactId>
 				<version>2.0.2</version>
 				<configuration>
-					<source>1.6</source>
-					<target>1.6</target>
+					<source>1.7</source>
+					<target>1.7</target>
+					<fork>true</fork>
 				</configuration>
 			</plugin>
 			<plugin>
diff --git a/genomix/genomix-hadoop/src/.DS_Store b/genomix/genomix-hadoop/src/.DS_Store
new file mode 100644
index 0000000..e0bf627
--- /dev/null
+++ b/genomix/genomix-hadoop/src/.DS_Store
Binary files differ
diff --git a/genomix/genomix-hadoop/src/main/.DS_Store b/genomix/genomix-hadoop/src/main/.DS_Store
new file mode 100644
index 0000000..325c6de
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/.DS_Store
Binary files differ
diff --git a/genomix/genomix-hadoop/src/main/java/.DS_Store b/genomix/genomix-hadoop/src/main/java/.DS_Store
new file mode 100644
index 0000000..dd6c872
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/.DS_Store
Binary files differ
diff --git a/genomix/genomix-hadoop/src/main/java/edu/.DS_Store b/genomix/genomix-hadoop/src/main/java/edu/.DS_Store
new file mode 100644
index 0000000..5e0c641
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/edu/.DS_Store
Binary files differ
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/.DS_Store b/genomix/genomix-hadoop/src/main/java/edu/uci/.DS_Store
new file mode 100644
index 0000000..4f27e83
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/.DS_Store
Binary files differ
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/.DS_Store b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/.DS_Store
new file mode 100644
index 0000000..92eedd1
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/.DS_Store
Binary files differ
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathDriver.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathDriver.java
index cfdf8d5..31995b6 100644
--- a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathDriver.java
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathDriver.java
@@ -23,12 +23,15 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.lib.MultipleOutputs;
 import org.apache.hadoop.mapred.lib.MultipleSequenceFileOutputFormat;
+import org.apache.hadoop.mapred.lib.MultipleTextOutputFormat;
 import org.kohsuke.args4j.CmdLineParser;
 import org.kohsuke.args4j.Option;
 
 import edu.uci.ics.genomix.type.KmerBytesWritable;
+import edu.uci.ics.genomix.type.VKmerBytesWritable;
 
 @SuppressWarnings("deprecation")
 public class MergePathDriver {
@@ -74,7 +77,7 @@
         conf.setInputFormat(SequenceFileInputFormat.class);
         conf.setOutputFormat(SequenceFileOutputFormat.class);
         
-        conf.setOutputKeyClass(KmerBytesWritable.class);
+        conf.setOutputKeyClass(VKmerBytesWritable.class);
         conf.setOutputValueClass(MergePathValueWritable.class);
         
         FileInputFormat.setInputPaths(conf, new Path(inputPath));
@@ -83,8 +86,9 @@
         FileSystem dfs = FileSystem.get(conf);
         dfs.delete(new Path(inputPath + "-step1"), true);
         JobClient.runJob(conf);
+        int iMerge = 0;
 /*----------------------------------------------------------------------*/
-        for(int iMerge = 0; iMerge < mergeRound; iMerge ++){
+        for(iMerge = 0; iMerge < mergeRound; iMerge ++){
         
             conf = new JobConf(MergePathDriver.class);
             conf.setInt("sizeKmer", sizeKmer);
@@ -98,24 +102,23 @@
             conf.setMapperClass(MergePathMapper.class);
             conf.setReducerClass(MergePathReducer.class);
             
-            conf.setMapOutputKeyClass(KmerBytesWritable.class);
+            conf.setMapOutputKeyClass(VKmerBytesWritable.class);
             conf.setMapOutputValueClass(MergePathValueWritable.class);
             
             conf.setInputFormat(SequenceFileInputFormat.class);
-            conf.setOutputFormat(MultipleSequenceFileOutputFormat.class);
             
             String uncomplete = "uncomplete" + iMerge;
             String complete = "complete" + iMerge;
            
             MultipleOutputs.addNamedOutput(conf, uncomplete,
-                    MergePathMultiSeqOutputFormat.class, KmerBytesWritable.class,
+                    MergePathMultiSeqOutputFormat.class, VKmerBytesWritable.class,
                     MergePathValueWritable.class);
 
             MultipleOutputs.addNamedOutput(conf, complete,
-                    MergePathMultiSeqOutputFormat.class, KmerBytesWritable.class,
+                    MergePathMultiTextOutputFormat.class, VKmerBytesWritable.class,
                     MergePathValueWritable.class);
             
-            conf.setOutputKeyClass(KmerBytesWritable.class);
+            conf.setOutputKeyClass(VKmerBytesWritable.class);
             conf.setOutputValueClass(MergePathValueWritable.class);
             
             FileInputFormat.setInputPaths(conf, new Path(inputPath + "-step1"));
@@ -127,6 +130,46 @@
             dfs.rename(new Path(outputPath + "/" + uncomplete), new Path(inputPath + "-step1"));
             dfs.rename(new Path(outputPath + "/" + complete), new Path(mergeResultPath + "/" + complete));
         }
+        /*----------------------------------------*/
+        conf = new JobConf(MergePathDriver.class);
+        conf.setInt("sizeKmer", sizeKmer);
+        conf.setInt("iMerge", iMerge);
+        
+        if (defaultConfPath != null) {
+            conf.addResource(new Path(defaultConfPath));
+        }
+        conf.setJobName("Path Merge");
+        
+        conf.setMapperClass(MergePathMapper.class);
+        conf.setReducerClass(MergePathReducer.class);
+        
+        conf.setMapOutputKeyClass(VKmerBytesWritable.class);
+        conf.setMapOutputValueClass(MergePathValueWritable.class);
+        
+        conf.setInputFormat(SequenceFileInputFormat.class);
+        
+        String uncomplete = "uncomplete" + iMerge;
+        String complete = "complete" + iMerge;
+       
+        MultipleOutputs.addNamedOutput(conf, uncomplete,
+                MergePathMultiTextOutputFormat.class, VKmerBytesWritable.class,
+                MergePathValueWritable.class);
+
+        MultipleOutputs.addNamedOutput(conf, complete,
+                MergePathMultiTextOutputFormat.class, VKmerBytesWritable.class,
+                MergePathValueWritable.class);
+        
+        conf.setOutputKeyClass(VKmerBytesWritable.class);
+        conf.setOutputValueClass(MergePathValueWritable.class);
+        
+        FileInputFormat.setInputPaths(conf, new Path(inputPath + "-step1"));
+        FileOutputFormat.setOutputPath(conf, new Path(outputPath));
+        conf.setNumReduceTasks(numReducers);
+        dfs.delete(new Path(outputPath), true);
+        JobClient.runJob(conf);
+        dfs.delete(new Path(inputPath + "-step1"), true);
+        dfs.rename(new Path(outputPath + "/" + uncomplete), new Path(inputPath + "-step1"));
+        dfs.rename(new Path(outputPath + "/" + complete), new Path(mergeResultPath + "/" + complete));
     }
 
     public static void main(String[] args) throws Exception {
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathMapper.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathMapper.java
index c3255f2..f7a3457 100644
--- a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathMapper.java
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathMapper.java
@@ -20,32 +20,30 @@
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
-
 import edu.uci.ics.genomix.type.GeneCode;
-import edu.uci.ics.genomix.type.KmerBytesWritable;
 import edu.uci.ics.genomix.type.VKmerBytesWritable;
 import edu.uci.ics.genomix.type.VKmerBytesWritableFactory;
 
 @SuppressWarnings("deprecation")
 public class MergePathMapper extends MapReduceBase implements
-        Mapper<KmerBytesWritable, MergePathValueWritable, KmerBytesWritable, MergePathValueWritable> {
+        Mapper<VKmerBytesWritable, MergePathValueWritable, VKmerBytesWritable, MergePathValueWritable> {
     private int KMER_SIZE;
     private VKmerBytesWritableFactory outputKmerFactory;
-    private MergePathValueWritable outputAdjList; 
+    private MergePathValueWritable outputValue; 
     private VKmerBytesWritable tmpKmer;
     private VKmerBytesWritable outputKmer;
 
     public void configure(JobConf job) {
         KMER_SIZE = job.getInt("sizeKmer", 0);
         outputKmerFactory = new VKmerBytesWritableFactory(KMER_SIZE);
-        outputAdjList = new MergePathValueWritable();
+        outputValue = new MergePathValueWritable();
         tmpKmer = new VKmerBytesWritable(KMER_SIZE);
         outputKmer = new VKmerBytesWritable(KMER_SIZE);
     }
 
     @Override
-    public void map(KmerBytesWritable key, MergePathValueWritable value,
-            OutputCollector<KmerBytesWritable, MergePathValueWritable> output, Reporter reporter) throws IOException {
+    public void map(VKmerBytesWritable key, MergePathValueWritable value,
+            OutputCollector<VKmerBytesWritable, MergePathValueWritable> output, Reporter reporter) throws IOException {
 
         byte precursor = (byte) 0xF0;
         byte succeed = (byte) 0x0F;
@@ -59,14 +57,14 @@
             byte succeedCode = GeneCode.getGeneCodeFromBitMap(succeed);
             tmpKmer.set(outputKmerFactory.getLastKmerFromChain(KMER_SIZE, key));
             outputKmer.set(outputKmerFactory.shiftKmerWithNextCode(tmpKmer, succeedCode));
-
-            KmerBytesWritable mergedKmer = outputKmerFactory.getFirstKmerFromChain(value.getKmerSize()
-                    - (KMER_SIZE - 1), value.getKmer());
-            outputAdjList.set(mergedKmer, adjBitMap, bitFlag);
-            output.collect(outputKmer, outputAdjList);
+            
+            tmpKmer.set(outputKmerFactory.getFirstKmerFromChain(key.getKmerLength() - (KMER_SIZE - 1), key));
+            outputValue.set(tmpKmer, adjBitMap, bitFlag);
+            output.collect(outputKmer, outputValue);
         } else {
-            outputAdjList.set(value);
-            output.collect(key, outputAdjList);
+            outputKmer.set(key);
+            outputValue.set(value);
+            output.collect(key, outputValue);
         }
     }
 }
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathReducer.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathReducer.java
index cead0e8..513b02c 100644
--- a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathReducer.java
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathReducer.java
@@ -22,19 +22,19 @@
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.lib.MultipleOutputs;
-
-import edu.uci.ics.genomix.type.KmerBytesWritable;
 import edu.uci.ics.genomix.type.VKmerBytesWritable;
 import edu.uci.ics.genomix.type.VKmerBytesWritableFactory;
 
 @SuppressWarnings("deprecation")
 public class MergePathReducer extends MapReduceBase implements
-        Reducer<KmerBytesWritable, MergePathValueWritable, KmerBytesWritable, MergePathValueWritable> {
+        Reducer<VKmerBytesWritable, MergePathValueWritable, VKmerBytesWritable, MergePathValueWritable> {
     private VKmerBytesWritableFactory kmerFactory;
     private VKmerBytesWritable outputKmer;
     private VKmerBytesWritable tmpKmer;
     private int KMER_SIZE;
-    private MergePathValueWritable outputAdjList;
+    private MergePathValueWritable outputValue;
+    private MergePathValueWritable tmpOutputValue;
+    
     MultipleOutputs mos = null;
     private int I_MERGE;
 
@@ -42,7 +42,8 @@
         mos = new MultipleOutputs(job);
         I_MERGE = Integer.parseInt(job.get("iMerge"));
         KMER_SIZE = job.getInt("sizeKmer", 0);
-        outputAdjList = new MergePathValueWritable();
+        outputValue = new MergePathValueWritable();
+        tmpOutputValue = new MergePathValueWritable();
         kmerFactory = new VKmerBytesWritableFactory(KMER_SIZE);
         outputKmer = new VKmerBytesWritable(KMER_SIZE);
         tmpKmer = new VKmerBytesWritable(KMER_SIZE);
@@ -50,58 +51,72 @@
 
     @SuppressWarnings("unchecked")
     @Override
-    public void reduce(KmerBytesWritable key, Iterator<MergePathValueWritable> values,
-            OutputCollector<KmerBytesWritable, MergePathValueWritable> output, Reporter reporter) throws IOException {
-        outputAdjList = values.next();
-
-        
+    public void reduce(VKmerBytesWritable key, Iterator<MergePathValueWritable> values,
+            OutputCollector<VKmerBytesWritable, MergePathValueWritable> output, Reporter reporter) throws IOException {
+        outputValue = values.next();
         if (values.hasNext() == true) {
-
-            if (outputAdjList.getFlag() == 1) {
-                byte adjBitMap = outputAdjList.getAdjBitMap();
-                byte bitFlag = outputAdjList.getFlag();
-                outputKmer.set(kmerFactory.mergeTwoKmer(outputAdjList.getKmer(), key));
-                
-                outputAdjList = values.next();
-                byte nextAdj = outputAdjList.getAdjBitMap();
+            if(outputValue.getFlag() != 1){
+                byte nextAdj = outputValue.getAdjBitMap();
                 byte succeed = (byte) 0x0F;
                 succeed = (byte) (succeed & nextAdj);
-                adjBitMap = (byte) (adjBitMap & 0xF0);
-                adjBitMap = (byte) (adjBitMap | succeed);
-                outputAdjList.set(null, 0, 0, adjBitMap, bitFlag, KMER_SIZE + outputAdjList.getKmerSize());
-
-                mos.getCollector("uncomplete" + I_MERGE, reporter).collect(outputKmer, outputAdjList);
-            } else {
-                byte nextAdj = outputAdjList.getAdjBitMap();
-                byte succeed = (byte) 0x0F;
-                succeed = (byte) (succeed & nextAdj);
-                outputAdjList = values.next();
-                byte adjBitMap = outputAdjList.getAdjBitMap();
-                byte flag = outputAdjList.getFlag();
-                int kmerSize = outputAdjList.getKmerSize();
                 
-                outputKmer.set(kmerFactory.mergeTwoKmer(outputAdjList.getKmer(), key));
+                outputValue = values.next();
+                byte adjBitMap = outputValue.getAdjBitMap();
+                byte flag = outputValue.getFlag();                
+                outputKmer.set(kmerFactory.mergeTwoKmer(outputValue.getKmer(), key));
+
                 adjBitMap = (byte) (adjBitMap & 0xF0);
                 adjBitMap = (byte) (adjBitMap | succeed);
-                outputAdjList.set(null, 0, 0, adjBitMap, flag, KMER_SIZE + kmerSize);
-
-                mos.getCollector("uncomplete" + I_MERGE, reporter).collect(outputKmer, outputAdjList);
+                outputValue.set(null, 0, 0, adjBitMap, flag, 0);
+                mos.getCollector("uncomplete" + I_MERGE, reporter).collect(outputKmer, outputValue);
+            }
+            else{
+                tmpOutputValue.set(outputValue);
+                byte tmpAdjMap = tmpOutputValue.getAdjBitMap();
+                
+                outputValue = values.next();
+                if(outputValue.getFlag() != 1) {
+                    outputKmer.set(kmerFactory.mergeTwoKmer(tmpOutputValue.getKmer(), key));
+                    
+                    byte nextAdj = outputValue.getAdjBitMap();
+                    byte succeed = (byte) 0x0F;
+                    succeed = (byte) (succeed & nextAdj);
+                    tmpAdjMap = (byte) (tmpAdjMap & 0xF0);
+                    tmpAdjMap = (byte) (tmpAdjMap | succeed);
+                    outputValue.set(null, 0, 0, tmpAdjMap, tmpOutputValue.getFlag(), 0);
+                    mos.getCollector("uncomplete" + I_MERGE, reporter).collect(outputKmer, outputValue);
+                }
+                else{
+                    
+                    tmpKmer.set(kmerFactory.getFirstKmerFromChain(KMER_SIZE-1, key));
+                    outputKmer.set(kmerFactory.mergeTwoKmer(tmpOutputValue.getKmer(), tmpKmer));
+                    tmpOutputValue.set(null, 0, 0, tmpAdjMap, tmpOutputValue.getFlag(), 0);
+                    mos.getCollector("complete" + I_MERGE, reporter).collect(outputKmer, tmpOutputValue);
+                    
+                    tmpKmer.set(kmerFactory.getFirstKmerFromChain(KMER_SIZE-1, key));
+                    outputKmer.set(kmerFactory.mergeTwoKmer(outputValue.getKmer(), tmpKmer));                    
+                    outputValue.set(null, 0, 0, outputValue.getAdjBitMap(), outputValue.getFlag(), 0);
+                    mos.getCollector("complete" + I_MERGE, reporter).collect(outputKmer, outputValue);
+                                        
+                    while(values.hasNext()) {
+                        outputValue = values.next();
+                        tmpKmer.set(kmerFactory.getFirstKmerFromChain(KMER_SIZE-1, key));
+                        outputKmer.set(kmerFactory.mergeTwoKmer(outputValue.getKmer(), tmpKmer));                    
+                        outputValue.set(null, 0, 0, outputValue.getAdjBitMap(), outputValue.getFlag(), 0);
+                        mos.getCollector("complete" + I_MERGE, reporter).collect(outputKmer, outputValue);
+                    }
+                }
             }
         } else {
-            if (outputAdjList.getFlag() != 0) {
-                byte adjBitMap = outputAdjList.getAdjBitMap();
-                byte flag = outputAdjList.getFlag();
-                int kmerSize = outputAdjList.getKmerSize();
-                
-                tmpKmer.set(kmerFactory.getFirstKmerFromChain(KMER_SIZE - 1, key));
-                outputKmer.set(kmerFactory.mergeTwoKmer(outputAdjList.getKmer(), tmpKmer));
-                outputAdjList.set(null, 0, 0, adjBitMap, flag, KMER_SIZE + kmerSize);
-                mos.getCollector("complete" + I_MERGE, reporter).collect(outputKmer, outputAdjList);
+            if (outputValue.getFlag() != 0) {
+                tmpKmer.set(kmerFactory.getFirstKmerFromChain(KMER_SIZE-1, key));
+                outputKmer.set(kmerFactory.mergeTwoKmer(outputValue.getKmer(), tmpKmer));                    
+                outputValue.set(null, 0, 0, outputValue.getAdjBitMap(), outputValue.getFlag(), 0);
+                mos.getCollector("complete" + I_MERGE, reporter).collect(outputKmer, outputValue);
             } else
-                mos.getCollector("uncomplete" + I_MERGE, reporter).collect(key, outputAdjList);
+                mos.getCollector("uncomplete" + I_MERGE, reporter).collect(key, outputValue);
         }
     }
-
     public void close() throws IOException {
         // TODO Auto-generated method stub
         mos.close();
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathValueWritable.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathValueWritable.java
index c5ff116..a6e6c92 100644
--- a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathValueWritable.java
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/MergePathValueWritable.java
@@ -28,115 +28,61 @@
 public class MergePathValueWritable extends BinaryComparable implements WritableComparable<BinaryComparable> {
 
     private static final byte[] EMPTY_BYTES = {};
-    private int size;
-    private byte[] bytes;
-
     private byte adjBitMap;
     private byte flag;
-    private int kmerSize;
-    
     private VKmerBytesWritable kmer;
 
     public MergePathValueWritable() {
-        this((byte) 0, (byte) 0, (byte) 0, EMPTY_BYTES);
+        this((byte) 0, (byte) 0, 0, EMPTY_BYTES);
     }
 
-    public MergePathValueWritable(byte adjBitMap, byte flag, byte kmerSize, byte[] bytes) {
+    public MergePathValueWritable(byte adjBitMap, byte flag, int kmerSize, byte[] bytes) {
         this.adjBitMap = adjBitMap;
         this.flag = flag;
-        this.kmerSize = kmerSize;
-
-        this.bytes = bytes;
-        this.size = bytes.length;
-        this.kmer = new VKmerBytesWritable(kmerSize);
+        this.kmer = new VKmerBytesWritable(kmerSize, bytes);
         kmer.set(bytes, 0, bytes.length);
     }
 
-    public void setSize(int size) {
-        if (size > getCapacity()) {
-            setCapacity(size * 3 / 2);
-        }
-        this.size = size;
+    public void set (MergePathValueWritable right) {
+        set(right.getBytes(), 0, right.getLength(), right.getAdjBitMap(), right.getFlag(), right.getKmerLength());
     }
-
-    public int getCapacity() {
-        return bytes.length;
-    }
-
-    public void setCapacity(int new_cap) {
-        if (new_cap != getCapacity()) {
-            byte[] new_data = new byte[new_cap];
-            if (new_cap < size) {
-                size = new_cap;
-            }
-            if (size != 0) {
-                System.arraycopy(bytes, 0, new_data, 0, size);
-            }
-            bytes = new_data;
-        }
-    }
-
-    public void set(MergePathValueWritable newData) {
-        set(newData.bytes, 0, newData.size, newData.adjBitMap, newData.flag, newData.kmerSize);
-    }
-    
     public void set(KmerBytesWritable mergedKmer, byte adjBitMap, byte bitFlag) {
-        set(mergedKmer.getBytes(),0,mergedKmer.getLength(), adjBitMap, bitFlag, mergedKmer.getKmerLength());
+        set(mergedKmer.getBytes(), 0, mergedKmer.getLength(), adjBitMap, bitFlag, mergedKmer.getKmerLength());
     }
 
     public void set(byte[] newData, int offset, int length, byte adjBitMap, byte flag, int kmerSize) {
-        setSize(0);        
         if (length != 0) {
-            setSize(length);
-            System.arraycopy(newData, offset, bytes, 0, size);
             kmer.set(kmerSize, newData, offset, length);
         }
-            this.adjBitMap = adjBitMap;
-            this.flag = flag;
-            this.kmerSize = kmerSize;
-    }
-    
-    public KmerBytesWritable getKmer(){
-        if (size != 0){
-            return kmer;
-        }
-        return null;
+        this.adjBitMap = adjBitMap;
+        this.flag = flag;
     }
 
     @Override
     public void readFields(DataInput arg0) throws IOException {
         // TODO Auto-generated method stub
-        setSize(0); // clear the old data
-        setSize(arg0.readInt());
-        if(size != 0){
-        arg0.readFully(bytes, 0, size);
-        kmer.set(bytes,0,size);
-        }
+        kmer.readFields(arg0);
         adjBitMap = arg0.readByte();
         flag = arg0.readByte();
-        kmerSize = arg0.readInt();
     }
 
     @Override
     public void write(DataOutput arg0) throws IOException {
         // TODO Auto-generated method stub
-        arg0.writeInt(size);
-        arg0.write(bytes, 0, size);
+        kmer.write(arg0);
         arg0.writeByte(adjBitMap);
         arg0.writeByte(flag);
-        arg0.writeInt(kmerSize);
     }
 
-    @Override
-    public byte[] getBytes() {
-        // TODO Auto-generated method stub
-        return bytes;
+    public KmerBytesWritable getKmer() {
+        if (kmer.getLength() != 0) {
+            return kmer;
+        }
+        return null;
     }
-
-    @Override
-    public int getLength() {
-        // TODO Auto-generated method stub
-        return size;
+    
+    public int getKmerLength() {
+        return kmer.getKmerLength();
     }
 
     public byte getAdjBitMap() {
@@ -147,26 +93,22 @@
         return this.flag;
     }
 
-    public int getKmerSize() {
-        return this.kmerSize;
-    }
-
     public String toString() {
-        StringBuffer sb = new StringBuffer(3 * size);
-        for (int idx = 0; idx < size; idx++) {
-            // if not the first, put a blank separator in
-            if (idx != 0) {
-                sb.append(' ');
-            }
-            String num = Integer.toHexString(0xff & bytes[idx]);
-            // if it is only one digit, add a leading 0.
-            if (num.length() < 2) {
-                sb.append('0');
-            }
-            sb.append(num);
-        }
-        return GeneCode.getSymbolFromBitMap(adjBitMap) + '\t' + String.valueOf(flag) + '\t' + sb.toString();
+        return GeneCode.getSymbolFromBitMap(adjBitMap) + '\t' + String.valueOf(flag) + '\t' + kmer.toString();
     }
 
- 
+    @Override
+    public byte[] getBytes() {
+        // TODO Auto-generated method stub
+        if (kmer.getLength() != 0) {
+            return kmer.getBytes();
+        } else
+            return null;
+    }
+
+
+    @Override
+    public int getLength() {
+        return kmer.getLength();
+    }
 }
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/SNodeInitialMapper.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/SNodeInitialMapper.java
index ae824e7..0bfe86d 100644
--- a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/SNodeInitialMapper.java
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/SNodeInitialMapper.java
@@ -22,8 +22,8 @@
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
-
 import edu.uci.ics.genomix.type.KmerBytesWritable;
+import edu.uci.ics.genomix.type.GeneCode;
 
 @SuppressWarnings("deprecation")
 public class SNodeInitialMapper extends MapReduceBase implements
@@ -101,39 +101,34 @@
         byte precursor = (byte) 0xF0;
         byte succeed = (byte) 0x0F;
         byte adjBitMap = value.get();
-        byte flag = (byte) 0;
+        byte bitFlag = (byte) 0;
         precursor = (byte) (precursor & adjBitMap);
         precursor = (byte) ((precursor & 0xff) >> 4);
         succeed = (byte) (succeed & adjBitMap);
         boolean inDegree = measureDegree(precursor);
         boolean outDegree = measureDegree(succeed);
-        byte initial = 0;
-        if (inDegree == true && outDegree == false) {
-            flag = (byte) 2;
-            switch (succeed) {
-                case 1:
-                    initial = (byte) 0x00;
-                    break;
-                case 2:
-                    initial = (byte) 0x01;
-                    break;
-                case 4:
-                    initial = (byte) 0x02;
-                    break;
-                case 8:
-                    initial = (byte) 0x03;
-                    break;
-            }
-            outputKmer.set(key);
-            outputKmer.shiftKmerWithNextCode(initial);
-            adjBitMap = (byte) (adjBitMap & 0xF0);
-            outputAdjList.set(null, 0, 0, adjBitMap, flag, KMER_SIZE);
-            output.collect(outputKmer, outputAdjList);
-        }
+
         if (inDegree == false && outDegree == false) {
             outputKmer.set(key);
-            outputAdjList.set(null, 0, 0, adjBitMap, flag, KMER_SIZE);
+            bitFlag = (byte) 2;
+            outputAdjList.set(null, 0, 0, adjBitMap, bitFlag, KMER_SIZE);///~~~~~kmersize----->0
             output.collect(outputKmer, outputAdjList);
         }
+        else{
+            for(int i = 0 ; i < 4; i ++){
+                byte temp = 0x01;
+                byte shiftedCode = 0;
+                temp  = (byte)(temp << i);
+                temp = (byte) (succeed & temp);
+                if(temp != 0 ){
+                    byte succeedCode = GeneCode.getGeneCodeFromBitMap(temp);
+                    shiftedCode = key.shiftKmerWithNextCode(succeedCode);
+                    outputKmer.set(key);
+                    outputAdjList.set(null, 0, 0, (byte)0, bitFlag, KMER_SIZE);
+                    output.collect(outputKmer, outputAdjList);
+                    key.shiftKmerWithPreCode(shiftedCode);
+                }
+            }
+        }
     }
 }
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/SNodeInitialReducer.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/SNodeInitialReducer.java
index 734abd6..89daa6c 100644
--- a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/SNodeInitialReducer.java
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmerging/SNodeInitialReducer.java
@@ -16,50 +16,50 @@
 
 import java.io.IOException;
 import java.util.Iterator;
-
 import org.apache.hadoop.mapred.MapReduceBase;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
-
 import edu.uci.ics.genomix.type.KmerBytesWritable;
+import edu.uci.ics.genomix.type.VKmerBytesWritable;
 
 @SuppressWarnings("deprecation")
 public class SNodeInitialReducer extends MapReduceBase implements
-        Reducer<KmerBytesWritable, MergePathValueWritable, KmerBytesWritable, MergePathValueWritable> {
-    private MergePathValueWritable outputAdjList = new MergePathValueWritable();
+        Reducer<KmerBytesWritable, MergePathValueWritable, VKmerBytesWritable, MergePathValueWritable> {
+    private VKmerBytesWritable outputKmer = new VKmerBytesWritable();
+    private MergePathValueWritable outputValue = new MergePathValueWritable();
 
     @Override
     public void reduce(KmerBytesWritable key, Iterator<MergePathValueWritable> values,
-            OutputCollector<KmerBytesWritable, MergePathValueWritable> output, Reporter reporter) throws IOException {
-        outputAdjList = values.next();
+            OutputCollector<VKmerBytesWritable, MergePathValueWritable> output, Reporter reporter) throws IOException {
+        outputKmer.set(key);
+        outputValue = values.next();
         if (values.hasNext() == true) {
-            if (outputAdjList.getFlag() != 2) {
-                byte adjBitMap = outputAdjList.getAdjBitMap();
-                int kmerSize = outputAdjList.getKmerSize();
+            if (outputValue.getFlag() == 2) {
                 byte bitFlag = 1;
-                outputAdjList.set(null, 0, 0, adjBitMap, bitFlag, kmerSize);
-                output.collect(key, outputAdjList);
-
+                outputValue.set(null, 0, 0, outputValue.getAdjBitMap(), bitFlag, outputValue.getKmerLength());///outputValue.getKmerLength()
+                output.collect(outputKmer, outputValue);
             } else {
                 boolean flag = false;
                 while (values.hasNext()) {
-                    outputAdjList = values.next();
-                    if (outputAdjList.getFlag() != 2) {
+                    outputValue = values.next();
+                    if (outputValue.getFlag() == 2) {
                         flag = true;
                         break;
                     }
                 }
                 if (flag == true) {
-                    byte adjBitMap = outputAdjList.getAdjBitMap();
-                    int kmerSize = outputAdjList.getKmerSize();
                     byte bitFlag = 1;
-                    outputAdjList.set(null, 0, 0, adjBitMap, bitFlag, kmerSize);
-                    output.collect(key, outputAdjList);
+                    outputValue.set(null, 0, 0, outputValue.getAdjBitMap(), bitFlag, outputValue.getKmerLength());
+                    output.collect(outputKmer, outputValue);
                 }
             }
         } else {
-            output.collect(key, outputAdjList);
+            if (outputValue.getFlag() == 2) {
+                byte bitFlag = 0;
+                outputValue.set(null, 0, 0, outputValue.getAdjBitMap(), bitFlag, outputValue.getKmerLength());
+                output.collect(outputKmer, outputValue);
+            }
         }
     }
 }
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/.DS_Store b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/.DS_Store
new file mode 100644
index 0000000..7c4ae29
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/.DS_Store
Binary files differ
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathH2Driver.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathH2Driver.java
new file mode 100644
index 0000000..7a18eb7
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathH2Driver.java
@@ -0,0 +1,167 @@
+package edu.uci.ics.pathmergingh2;
+
+import java.io.IOException;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.hadoop.mapred.TextOutputFormat;
+import org.apache.hadoop.mapred.lib.MultipleOutputs;
+import org.apache.hadoop.mapred.lib.MultipleSequenceFileOutputFormat;
+import org.apache.hadoop.mapred.lib.MultipleTextOutputFormat;
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+import edu.uci.ics.genomix.type.KmerBytesWritable;
+import edu.uci.ics.genomix.type.VKmerBytesWritable;
+
+@SuppressWarnings("deprecation")
+public class MergePathH2Driver {
+    
+    private static class Options {
+        @Option(name = "-inputpath", usage = "the input path", required = true)
+        public String inputPath;
+
+        @Option(name = "-outputpath", usage = "the output path", required = true)
+        public String outputPath;
+
+        @Option(name = "-mergeresultpath", usage = "the merging results path", required = true)
+        public String mergeResultPath;
+        
+        @Option(name = "-num-reducers", usage = "the number of reducers", required = true)
+        public int numReducers;
+
+        @Option(name = "-kmer-size", usage = "the size of kmer", required = true)
+        public int sizeKmer;
+        
+        @Option(name = "-merge-rounds", usage = "the while rounds of merging", required = true)
+        public int mergeRound;
+
+    }
+
+
+    public void run(String inputPath, String outputPath, String mergeResultPath, int numReducers, int sizeKmer, int mergeRound, String defaultConfPath)
+            throws IOException{
+
+        JobConf conf = new JobConf(MergePathH2Driver.class);
+        conf.setInt("sizeKmer", sizeKmer);
+        
+        if (defaultConfPath != null) {
+            conf.addResource(new Path(defaultConfPath));
+        }
+        conf.setJobName("Initial Path-Starting-Points Table");
+        conf.setMapperClass(SNodeInitialMapper.class); 
+        conf.setReducerClass(SNodeInitialReducer.class);
+        
+        conf.setMapOutputKeyClass(KmerBytesWritable.class);
+        conf.setMapOutputValueClass(MergePathValueWritable.class);
+        
+        conf.setInputFormat(SequenceFileInputFormat.class);
+        conf.setOutputFormat(TextOutputFormat.class);
+        
+        conf.setOutputKeyClass(VKmerBytesWritable.class);
+        conf.setOutputValueClass(MergePathValueWritable.class);
+        
+        FileInputFormat.setInputPaths(conf, new Path(inputPath));
+        FileOutputFormat.setOutputPath(conf, new Path(outputPath));
+        conf.setNumReduceTasks(numReducers);
+        FileSystem dfs = FileSystem.get(conf);
+        dfs.delete(new Path(outputPath), true);
+        JobClient.runJob(conf);
+        int iMerge = 0;
+/*----------------------------------------------------------------------*/
+/*        for(iMerge = 0; iMerge < mergeRound; iMerge ++){
+        
+            conf = new JobConf(MergePathDriver.class);
+            conf.setInt("sizeKmer", sizeKmer);
+            conf.setInt("iMerge", iMerge);
+            
+            if (defaultConfPath != null) {
+                conf.addResource(new Path(defaultConfPath));
+            }
+            conf.setJobName("Path Merge");
+            
+            conf.setMapperClass(MergePathMapper.class);
+            conf.setReducerClass(MergePathReducer.class);
+            
+            conf.setMapOutputKeyClass(VKmerBytesWritable.class);
+            conf.setMapOutputValueClass(MergePathValueWritable.class);
+            
+            conf.setInputFormat(SequenceFileInputFormat.class);
+            
+            String uncomplete = "uncomplete" + iMerge;
+            String complete = "complete" + iMerge;
+           
+            MultipleOutputs.addNamedOutput(conf, uncomplete,
+                    MergePathMultiSeqOutputFormat.class, VKmerBytesWritable.class,
+                    MergePathValueWritable.class);
+
+            MultipleOutputs.addNamedOutput(conf, complete,
+                    MergePathMultiTextOutputFormat.class, VKmerBytesWritable.class,
+                    MergePathValueWritable.class);
+            
+            conf.setOutputKeyClass(VKmerBytesWritable.class);
+            conf.setOutputValueClass(MergePathValueWritable.class);
+            
+            FileInputFormat.setInputPaths(conf, new Path(inputPath + "-step1"));
+            FileOutputFormat.setOutputPath(conf, new Path(outputPath));
+            conf.setNumReduceTasks(numReducers);
+            dfs.delete(new Path(outputPath), true);
+            JobClient.runJob(conf);
+            dfs.delete(new Path(inputPath + "-step1"), true);
+            dfs.rename(new Path(outputPath + "/" + uncomplete), new Path(inputPath + "-step1"));
+            dfs.rename(new Path(outputPath + "/" + complete), new Path(mergeResultPath + "/" + complete));
+        }
+
+        conf = new JobConf(MergePathDriver.class);
+        conf.setInt("sizeKmer", sizeKmer);
+        conf.setInt("iMerge", iMerge);
+        
+        if (defaultConfPath != null) {
+            conf.addResource(new Path(defaultConfPath));
+        }
+        conf.setJobName("Path Merge");
+        
+        conf.setMapperClass(MergePathMapper.class);
+        conf.setReducerClass(MergePathReducer.class);
+        
+        conf.setMapOutputKeyClass(VKmerBytesWritable.class);
+        conf.setMapOutputValueClass(MergePathValueWritable.class);
+        
+        conf.setInputFormat(SequenceFileInputFormat.class);
+        
+        String uncomplete = "uncomplete" + iMerge;
+        String complete = "complete" + iMerge;
+       
+        MultipleOutputs.addNamedOutput(conf, uncomplete,
+                MergePathMultiTextOutputFormat.class, VKmerBytesWritable.class,
+                MergePathValueWritable.class);
+
+        MultipleOutputs.addNamedOutput(conf, complete,
+                MergePathMultiTextOutputFormat.class, VKmerBytesWritable.class,
+                MergePathValueWritable.class);
+        
+        conf.setOutputKeyClass(VKmerBytesWritable.class);
+        conf.setOutputValueClass(MergePathValueWritable.class);
+        
+        FileInputFormat.setInputPaths(conf, new Path(inputPath + "-step1"));
+        FileOutputFormat.setOutputPath(conf, new Path(outputPath));
+        conf.setNumReduceTasks(numReducers);
+        dfs.delete(new Path(outputPath), true);
+        JobClient.runJob(conf);
+        dfs.delete(new Path(inputPath + "-step1"), true);
+        dfs.rename(new Path(outputPath + "/" + uncomplete), new Path(inputPath + "-step1"));
+        dfs.rename(new Path(outputPath + "/" + complete), new Path(mergeResultPath + "/" + complete));*/
+    }
+
+    public static void main(String[] args) throws Exception {
+        Options options = new Options();
+        CmdLineParser parser = new CmdLineParser(options);
+        parser.parseArgument(args);
+        MergePathH2Driver driver = new MergePathH2Driver();
+        driver.run(options.inputPath, options.outputPath, options.mergeResultPath, options.numReducers, options.sizeKmer, options.mergeRound, null);
+    }
+}
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathH2Mapper.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathH2Mapper.java
new file mode 100644
index 0000000..69106df
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathH2Mapper.java
@@ -0,0 +1,85 @@
+package edu.uci.ics.pathmergingh2;
+
+import java.io.IOException;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import edu.uci.ics.genomix.type.GeneCode;
+import edu.uci.ics.genomix.type.VKmerBytesWritable;
+import edu.uci.ics.genomix.type.VKmerBytesWritableFactory;
+
+@SuppressWarnings("deprecation")
+public class MergePathH2Mapper extends MapReduceBase implements
+        Mapper<VKmerBytesWritable, MergePathValueWritable, VKmerBytesWritable, MergePathValueWritable> {
+
+    private int KMER_SIZE;
+    private VKmerBytesWritableFactory outputKmerFactory;
+    private MergePathValueWritable outputValue; 
+    private VKmerBytesWritable tmpKmer;
+    private VKmerBytesWritable outputKmer;
+
+    public void configure(JobConf job) {
+        KMER_SIZE = job.getInt("sizeKmer", 0);
+        outputKmerFactory = new VKmerBytesWritableFactory(KMER_SIZE);
+        outputValue = new MergePathValueWritable();
+        tmpKmer = new VKmerBytesWritable(KMER_SIZE);
+        outputKmer = new VKmerBytesWritable(KMER_SIZE);
+    }
+    @Override
+    public void map(VKmerBytesWritable key, MergePathValueWritable value,
+            OutputCollector<VKmerBytesWritable, MergePathValueWritable> output, Reporter reporter) throws IOException {
+        byte precursor = (byte) 0xF0;
+        byte succeed = (byte) 0x0F;
+        byte adjBitMap = value.getAdjBitMap();
+        byte bitFlag = value.getFlag();
+        precursor = (byte) (precursor & adjBitMap);
+        precursor = (byte) ((precursor & 0xff) >> 4);
+        succeed = (byte) (succeed & adjBitMap);        
+        byte bitStartEnd = (byte) (0x81 & bitFlag);
+        
+        switch (bitStartEnd) {
+            case 0x01:
+                byte succeedCode = GeneCode.getGeneCodeFromBitMap(succeed);
+                tmpKmer.set(outputKmerFactory.getLastKmerFromChain(KMER_SIZE, key));
+                outputKmer.set(outputKmerFactory.shiftKmerWithNextCode(tmpKmer, succeedCode));
+                
+                tmpKmer.set(outputKmerFactory.getFirstKmerFromChain(key.getKmerLength() - (KMER_SIZE - 1), key));
+                bitFlag = (byte) (bitFlag | 0x08);
+                outputValue.set(tmpKmer, adjBitMap, bitFlag);
+                output.collect(outputKmer, outputValue);
+                break;
+            case (byte) 0x80:
+                tmpKmer.set(outputKmerFactory.getFirstKmerFromChain(KMER_SIZE, key));
+                outputKmer.set(tmpKmer);//?????
+                tmpKmer.set(outputKmerFactory.getLastKmerFromChain(key.getKmerLength() - KMER_SIZE, key));
+                bitFlag = (byte) (bitFlag | 0x10);
+                outputValue.set(tmpKmer, adjBitMap, bitFlag);
+                output.collect(outputKmer, outputValue);
+                break;
+            case 0x00:
+                succeedCode = GeneCode.getGeneCodeFromBitMap(succeed);
+                tmpKmer.set(outputKmerFactory.getLastKmerFromChain(KMER_SIZE, key));
+                outputKmer.set(outputKmerFactory.shiftKmerWithNextCode(tmpKmer, succeedCode));
+                
+                tmpKmer.set(outputKmerFactory.getFirstKmerFromChain(key.getKmerLength() - (KMER_SIZE - 1), key));
+                bitFlag = (byte) (bitFlag | 0x08);
+                outputValue.set(tmpKmer, adjBitMap, bitFlag);
+                output.collect(outputKmer, outputValue);
+                
+                bitFlag = (byte) (bitFlag & 0xF7);
+                tmpKmer.set(outputKmerFactory.getFirstKmerFromChain(KMER_SIZE, key));
+                outputKmer.set(tmpKmer);//?????
+                tmpKmer.set(outputKmerFactory.getLastKmerFromChain(key.getKmerLength() - KMER_SIZE, key));
+                bitFlag = (byte) (bitFlag | 0x10);
+                outputValue.set(tmpKmer, adjBitMap, bitFlag);
+                output.collect(outputKmer, outputValue);
+                break;
+            case (byte) 0x81:
+                outputKmer.set(key);
+            outputValue.set(null, 0, 0, adjBitMap, bitFlag, 0);//????????
+                break;
+        }
+    }
+}
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathH2Reducer.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathH2Reducer.java
new file mode 100644
index 0000000..d206926
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathH2Reducer.java
@@ -0,0 +1,97 @@
+package edu.uci.ics.pathmergingh2;
+
+import java.io.IOException;
+import java.util.Iterator;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.lib.MultipleOutputs;
+import edu.uci.ics.genomix.type.VKmerBytesWritable;
+import edu.uci.ics.genomix.type.VKmerBytesWritableFactory;
+
+@SuppressWarnings("deprecation")
+public class MergePathH2Reducer extends MapReduceBase implements
+        Reducer<VKmerBytesWritable, MergePathValueWritable, VKmerBytesWritable, MergePathValueWritable> {
+    private VKmerBytesWritableFactory kmerFactory;
+    private VKmerBytesWritable outputKmer;
+    private VKmerBytesWritable tmpKmer1;
+    private VKmerBytesWritable tmpKmer2;
+    private int KMER_SIZE;
+    private MergePathValueWritable outputValue;
+    private MergePathValueWritable tmpOutputValue;
+
+    MultipleOutputs mos = null;
+    private int I_MERGE;
+
+    public void configure(JobConf job) {
+        mos = new MultipleOutputs(job);
+        I_MERGE = Integer.parseInt(job.get("iMerge"));
+        KMER_SIZE = job.getInt("sizeKmer", 0);
+        outputValue = new MergePathValueWritable();
+        tmpOutputValue = new MergePathValueWritable();
+        kmerFactory = new VKmerBytesWritableFactory(KMER_SIZE);
+        outputKmer = new VKmerBytesWritable(KMER_SIZE);
+        tmpKmer1 = new VKmerBytesWritable(KMER_SIZE);
+        tmpKmer2 = new VKmerBytesWritable(KMER_SIZE);
+    }
+
+    @SuppressWarnings("unchecked")
+    public void reduce(VKmerBytesWritable key, Iterator<MergePathValueWritable> values,
+            OutputCollector<VKmerBytesWritable, MergePathValueWritable> output, Reporter reporter) throws IOException {
+        outputValue = values.next();
+        if (values.hasNext() == true) {
+            byte bitFlag = outputValue.getFlag();
+            byte bitStartEnd = (byte) (0x81 & bitFlag);
+            byte bitPosiNegative = (byte) (0x18 & bitFlag);
+            byte succeed = (byte) 0x0F;            
+
+            switch (bitPosiNegative) {
+                case (byte) 0x08:
+                    tmpKmer1.set(kmerFactory.mergeTwoKmer(outputValue.getKmer(), key));
+                byte adjBitMap = outputValue.getAdjBitMap();
+                    outputValue = values.next();
+                    if (bitStartEnd == 0x80) {
+                        tmpKmer2.set(kmerFactory.mergeTwoKmer(key, outputValue.getKmer()));
+                        tmpOutputValue.set(null, 0, 0, outputValue.getAdjBitMap(), outputValue.getFlag(), 0);
+                        mos.getCollector("uncomplete" + I_MERGE, reporter).collect(tmpKmer2, tmpOutputValue);
+                    }
+
+                    outputKmer.set(kmerFactory.mergeTwoKmer(tmpKmer1, outputValue.getKmer()));
+                    succeed = (byte) (succeed & outputValue.getFlag());
+                    adjBitMap = (byte) (adjBitMap & 0xF0);
+                    adjBitMap = (byte) (adjBitMap | succeed);
+                    byte outputFlag = (byte) (0x81 & bitFlag);
+                    outputFlag = (byte) (outputFlag & outputValue.getFlag());
+                    outputValue.set(null, 0, 0, adjBitMap, outputFlag, 0);
+                    mos.getCollector("uncomplete" + I_MERGE, reporter).collect(outputKmer, outputValue);
+                    break;
+                case (byte) 0x10:
+                    tmpKmer1.set(kmerFactory.mergeTwoKmer(key, outputValue.getKmer()));
+                    if (bitStartEnd == 0x80) {
+                        tmpOutputValue.set(null, 0, 0, outputValue.getAdjBitMap(), outputValue.getFlag(), 0);
+                        mos.getCollector("uncomplete" + I_MERGE, reporter).collect(tmpKmer1, tmpOutputValue);
+                    }
+                    succeed = (byte) (succeed & outputValue.getFlag());
+                    outputValue = values.next();
+                    outputKmer.set(kmerFactory.mergeTwoKmer(outputValue.getKmer(), tmpKmer1));
+                    adjBitMap = outputValue.getAdjBitMap();
+                    adjBitMap = (byte) (adjBitMap & 0xF0);
+                    adjBitMap = (byte) (adjBitMap | succeed);
+                    outputFlag = (byte) (0x81 & bitFlag);
+                    outputFlag = (byte) (outputFlag & outputValue.getFlag());
+                    outputValue.set(null, 0, 0, adjBitMap, outputFlag, 0);
+                    mos.getCollector("uncomplete" + I_MERGE, reporter).collect(outputKmer, outputValue);
+                    break;
+            }
+        } else {
+            byte bitFlag = outputValue.getFlag();
+            byte bitStartEnd = (byte) (0x81 & bitFlag);
+            if(bitStartEnd == 0x81) {
+                outputKmer.set(key);
+                mos.getCollector("complete" + I_MERGE, reporter).collect(outputKmer, outputValue);
+            }
+        }
+    }
+}
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathMultiSeqOutputFormat.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathMultiSeqOutputFormat.java
new file mode 100644
index 0000000..cbde512
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathMultiSeqOutputFormat.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pathmergingh2;
+
+import java.io.File;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.mapred.lib.MultipleSequenceFileOutputFormat;
+
+
+public class MergePathMultiSeqOutputFormat extends MultipleSequenceFileOutputFormat<BytesWritable, MergePathValueWritable>{
+    @Override
+    protected String generateLeafFileName(String name) {
+        // TODO Auto-generated method stub System.out.println(name); 
+        String[] names = name.split("-");
+        return names[0] + File.separator + name;
+    }
+}
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathMultiTextOutputFormat.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathMultiTextOutputFormat.java
new file mode 100644
index 0000000..d6176e2
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathMultiTextOutputFormat.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pathmergingh2;
+
+import java.io.File;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.lib.MultipleTextOutputFormat;
+
+public class MergePathMultiTextOutputFormat extends MultipleTextOutputFormat<Text, Text>{
+    @Override
+    protected String generateLeafFileName(String name) {
+        // TODO Auto-generated method stub System.out.println(name); 
+        String[] names = name.split("-");
+        return names[0] + File.separator + name;
+    }
+}
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathValueWritable.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathValueWritable.java
new file mode 100644
index 0000000..c3fbe1c
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/MergePathValueWritable.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pathmergingh2;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import org.apache.hadoop.io.BinaryComparable;
+import org.apache.hadoop.io.WritableComparable;
+
+import edu.uci.ics.genomix.type.GeneCode;
+import edu.uci.ics.genomix.type.KmerBytesWritable;
+import edu.uci.ics.genomix.type.VKmerBytesWritable;
+
+public class MergePathValueWritable extends BinaryComparable implements WritableComparable<BinaryComparable> {
+
+    private static final byte[] EMPTY_BYTES = {};
+    private byte adjBitMap;
+    private byte flag;
+    private VKmerBytesWritable kmer;
+
+    public MergePathValueWritable() {
+        this((byte) 0, (byte) 0, 0, EMPTY_BYTES);
+    }
+
+    public MergePathValueWritable(byte adjBitMap, byte flag, int kmerSize, byte[] bytes) {
+        this.adjBitMap = adjBitMap;
+        this.flag = flag;
+        this.kmer = new VKmerBytesWritable(kmerSize, bytes);
+        kmer.set(bytes, 0, bytes.length);
+    }
+
+    public void set (MergePathValueWritable right) {
+        set(right.getBytes(), 0, right.getLength(), right.getAdjBitMap(), right.getFlag(), right.getKmerLength());
+    }
+    public void set(KmerBytesWritable mergedKmer, byte adjBitMap, byte bitFlag) {
+        set(mergedKmer.getBytes(), 0, mergedKmer.getLength(), adjBitMap, bitFlag, mergedKmer.getKmerLength());
+    }
+
+    public void set(byte[] newData, int offset, int length, byte adjBitMap, byte flag, int kmerSize) {
+        if (length != 0) {
+            kmer.set(kmerSize, newData, offset, length);
+        }
+        this.adjBitMap = adjBitMap;
+        this.flag = flag;
+    }
+
+    @Override
+    public void readFields(DataInput arg0) throws IOException {
+        // TODO Auto-generated method stub
+        kmer.readFields(arg0);
+        adjBitMap = arg0.readByte();
+        flag = arg0.readByte();
+    }
+
+    @Override
+    public void write(DataOutput arg0) throws IOException {
+        // TODO Auto-generated method stub
+        kmer.write(arg0);
+        arg0.writeByte(adjBitMap);
+        arg0.writeByte(flag);
+    }
+
+    public KmerBytesWritable getKmer() {
+        if (kmer.getLength() != 0) {
+            return kmer;
+        }
+        return null;
+    }
+
+    public byte getAdjBitMap() {
+        return this.adjBitMap;
+    }
+
+    public byte getFlag() {
+        return this.flag;
+    }
+
+    public String toString() {
+        return GeneCode.getSymbolFromBitMap(adjBitMap) + '\t' + String.valueOf(flag);
+    }
+
+    @Override
+    public byte[] getBytes() {
+        // TODO Auto-generated method stub
+        if (kmer.getLength() != 0) {
+            return kmer.getBytes();
+        } else
+            return null;
+    }
+
+    public int getKmerLength() {
+        return kmer.getKmerLength();
+    }
+    @Override
+    public int getLength() {
+        return kmer.getLength();
+    }
+}
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/SNodeInitialMapper.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/SNodeInitialMapper.java
new file mode 100644
index 0000000..75ac0d1
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/SNodeInitialMapper.java
@@ -0,0 +1,137 @@
+package edu.uci.ics.pathmergingh2;
+
+import java.io.IOException;
+import org.apache.hadoop.io.ByteWritable;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import edu.uci.ics.genomix.type.GeneCode;
+import edu.uci.ics.genomix.type.KmerBytesWritable;
+
+@SuppressWarnings("deprecation")
+public class SNodeInitialMapper extends MapReduceBase implements
+        Mapper<KmerBytesWritable, ByteWritable, KmerBytesWritable, MergePathValueWritable> {
+
+    public int KMER_SIZE;
+    public KmerBytesWritable outputKmer;
+    public MergePathValueWritable outputAdjList;
+
+    public void configure(JobConf job) {
+        KMER_SIZE = Integer.parseInt(job.get("sizeKmer"));
+        outputKmer = new KmerBytesWritable(KMER_SIZE);
+        outputAdjList = new MergePathValueWritable();
+    }
+
+    boolean measureDegree(byte adjacent) {
+        boolean result = true;
+        switch (adjacent) {
+            case 0:
+                result = true;
+                break;
+            case 1:
+                result = false;
+                break;
+            case 2:
+                result = false;
+                break;
+            case 3:
+                result = true;
+                break;
+            case 4:
+                result = false;
+                break;
+            case 5:
+                result = true;
+                break;
+            case 6:
+                result = true;
+                break;
+            case 7:
+                result = true;
+                break;
+            case 8:
+                result = false;
+                break;
+            case 9:
+                result = true;
+                break;
+            case 10:
+                result = true;
+                break;
+            case 11:
+                result = true;
+                break;
+            case 12:
+                result = true;
+                break;
+            case 13:
+                result = true;
+                break;
+            case 14:
+                result = true;
+                break;
+            case 15:
+                result = true;
+                break;
+        }
+        return result;
+    }
+
+    @Override
+    public void map(KmerBytesWritable key, ByteWritable value,
+            OutputCollector<KmerBytesWritable, MergePathValueWritable> output, Reporter reporter) throws IOException {
+        byte precursor = (byte) 0xF0;
+        byte succeed = (byte) 0x0F;
+        byte adjBitMap = value.get();
+        byte bitFlag = (byte) 0;
+        precursor = (byte) (precursor & adjBitMap);
+        precursor = (byte) ((precursor & 0xff) >> 4);
+        succeed = (byte) (succeed & adjBitMap);
+        boolean inDegree = measureDegree(precursor);
+        boolean outDegree = measureDegree(succeed);
+        if (key.toString().equals("CGC")) {
+            int a = 2;
+            int b = a;
+        }
+        if (inDegree == false && outDegree == false) {
+
+            outputKmer.set(key);
+            bitFlag = (byte) 2;
+            outputAdjList.set(null, 0, 0, adjBitMap, bitFlag, 0);
+            output.collect(outputKmer, outputAdjList);
+        } else {
+            for (int i = 0; i < 4; i++) {
+                byte temp = 0x01;
+                byte shiftedCode = 0;
+                temp = (byte) (temp << i);
+                temp = (byte) (precursor & temp);
+                if (temp != 0) {
+                    byte precurCode = GeneCode.getGeneCodeFromBitMap(temp);
+                    shiftedCode = key.shiftKmerWithPreCode(precurCode);
+                    outputKmer.set(key);
+                    bitFlag = (byte) 0x80;
+                    outputAdjList.set(null, 0, 0, (byte) 6, bitFlag, 0);
+                    output.collect(outputKmer, outputAdjList);
+                    key.shiftKmerWithNextCode(shiftedCode);
+                }
+            }
+            for (int i = 0; i < 4; i++) {
+                byte temp = 0x01;
+                byte shiftedCode = 0;
+                temp = (byte) (temp << i);
+                temp = (byte) (succeed & temp);
+                if (temp != 0) {
+                    byte succeedCode = GeneCode.getGeneCodeFromBitMap(temp);
+                    shiftedCode = key.shiftKmerWithNextCode(succeedCode);
+                    outputKmer.set(key);
+                    bitFlag = (byte) 0x01;
+                    outputAdjList.set(null, 0, 0, (byte) 0, bitFlag, 0);
+                    output.collect(outputKmer, outputAdjList);
+                    key.shiftKmerWithPreCode(shiftedCode);
+                }
+            }
+        }
+    }
+}
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/SNodeInitialReducer.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/SNodeInitialReducer.java
new file mode 100644
index 0000000..3d2b58c
--- /dev/null
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/pathmergingh2/SNodeInitialReducer.java
@@ -0,0 +1,80 @@
+package edu.uci.ics.pathmergingh2;
+
+import java.io.IOException;
+import java.util.Iterator;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import edu.uci.ics.genomix.type.KmerBytesWritable;
+import edu.uci.ics.genomix.type.VKmerBytesWritable;
+
+@SuppressWarnings("deprecation")
+public class SNodeInitialReducer extends MapReduceBase implements
+        Reducer<KmerBytesWritable, MergePathValueWritable, VKmerBytesWritable, MergePathValueWritable> {
+    private VKmerBytesWritable outputKmer = new VKmerBytesWritable();
+    private MergePathValueWritable outputValue = new MergePathValueWritable();
+
+    @Override
+    public void reduce(KmerBytesWritable key, Iterator<MergePathValueWritable> values,
+            OutputCollector<VKmerBytesWritable, MergePathValueWritable> output, Reporter reporter) throws IOException {
+        outputKmer.set(key);
+        outputValue = values.next();
+        byte startFlag = 0x00;
+        byte endFlag = 0x00;
+        byte targetPointFlag = 0x00;
+        byte targetAdjList = 0x00;
+        byte outputFlag = 0x00;
+        if(key.toString().equals("TCG")){
+            int a = 2;
+            int b = a;
+        }
+        if (values.hasNext() == true) {
+            switch (outputValue.getFlag()) {
+                case (byte) 0x01:
+                    startFlag = 0x01;
+                    break;
+                case (byte) 0x80:
+                    endFlag = 0x01;
+                    break;
+                case (byte) 0x02:
+                    targetPointFlag = 0x01;
+                    targetAdjList = outputValue.getAdjBitMap();
+                    break;
+            }
+            while (values.hasNext()) {
+                outputValue = values.next();
+                switch (outputValue.getFlag()) {
+                    case (byte) 0x01:
+                        startFlag = 0x01;
+                        break;
+                    case (byte) 0x80:
+                        endFlag = 0x01;
+                        break;
+                    case (byte) 0x02:
+                        targetPointFlag = 0x02;
+                        targetAdjList = outputValue.getAdjBitMap();
+                        break;
+                }
+                if(startFlag != 0x00 && endFlag!= 0x00 && targetPointFlag != 0x00)
+                    break;
+            }
+            if(targetPointFlag == 0x02) {
+                if(startFlag == 0x01) {
+                    outputFlag = (byte) (outputFlag | startFlag);
+                }
+                if(endFlag == 0x80) {
+                    outputFlag = (byte) (outputFlag | endFlag);
+                }
+                outputValue.set(null, 0, 0, targetAdjList, outputFlag, 0);
+                output.collect(outputKmer, outputValue);
+            }
+        } else {
+            if (outputValue.getFlag() == 2) {
+                byte bitFlag = 0;
+                outputValue.set(null, 0, 0, outputValue.getAdjBitMap(), bitFlag, 0);
+                output.collect(outputKmer, outputValue);
+            }
+        }
+    }
+}
diff --git a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmerging/MergePathTest.java b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmerging/MergePathTest.java
index 95a9785..5e6e51e 100644
--- a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmerging/MergePathTest.java
+++ b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmerging/MergePathTest.java
@@ -18,6 +18,7 @@
 import org.junit.Test;
 
 import edu.uci.ics.genomix.type.KmerBytesWritable;
+import edu.uci.ics.genomix.type.VKmerBytesWritable;
 import edu.uci.ics.utils.TestUtils;
 
 @SuppressWarnings("deprecation")
@@ -33,7 +34,7 @@
     private static final String RESULT_PATH = "/result3";
     private static final String EXPECTED_PATH = "expected/result3";
     private static final String TEST_SOURCE_DIR = COMPARE_DIR + RESULT_PATH + "/comparesource.txt";
-    private static final int COUNT_REDUCER = 4;
+    private static final int COUNT_REDUCER = 1;
     private static final int SIZE_KMER = 3;
 
     private MiniDFSCluster dfsCluster;
@@ -48,12 +49,13 @@
         startHadoop();
 
         MergePathDriver tldriver = new MergePathDriver();
-        tldriver.run(HDFS_PATH, RESULT_PATH, HDFA_PATH_DATA, COUNT_REDUCER, SIZE_KMER, 3, HADOOP_CONF_PATH);
-
-        SequenceFile.Reader reader = null;
-        Path path = new Path(HDFA_PATH_DATA + "/complete2" + "/complete2-r-00000");
+        tldriver.run(HDFS_PATH, RESULT_PATH, HDFA_PATH_DATA, COUNT_REDUCER, SIZE_KMER, 1, HADOOP_CONF_PATH);
+        
+/*        SequenceFile.Reader reader = null;
+        Path path = new Path(RESULT_PATH + "/part-00000");
+//        Path path = new Path(RESULT_PATH + "/uncomplete0" + "/uncomplete0-r-00000");
         reader = new SequenceFile.Reader(dfs, path, conf);
-        KmerBytesWritable key = (KmerBytesWritable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
+        VKmerBytesWritable key = (VKmerBytesWritable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
         MergePathValueWritable value = (MergePathValueWritable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
         File filePathTo = new File(TEST_SOURCE_DIR);
         BufferedWriter bw = new BufferedWriter(new FileWriter(filePathTo));
@@ -61,10 +63,10 @@
             bw.write(key.toString() + "\t" + value.getAdjBitMap() + "\t" + value.getFlag());
             bw.newLine();
         }
-        bw.close();
-
+        bw.close();*/
         dumpResult();
-        TestUtils.compareWithResult(new File(TEST_SOURCE_DIR), new File(EXPECTED_PATH));
+        
+//        TestUtils.compareWithResult(new File(TEST_SOURCE_DIR), new File(EXPECTED_PATH));
 
         cleanupHadoop();
 
@@ -96,7 +98,8 @@
     }
 
     private void dumpResult() throws IOException {
-        Path src = new Path(HDFA_PATH_DATA + "/" + "complete2");
+//        Path src = new Path(HDFA_PATH_DATA + "/" + "complete2");
+        Path src = new Path(RESULT_PATH);
         Path dest = new Path(ACTUAL_RESULT_DIR + "/");
         dfs.copyToLocalFile(src, dest);
     }
diff --git a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmergingh2/MergePathH2Test.java b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmergingh2/MergePathH2Test.java
new file mode 100644
index 0000000..285e554
--- /dev/null
+++ b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmergingh2/MergePathH2Test.java
@@ -0,0 +1,107 @@
+package edu.uci.ics.pathmergingh2;
+
+import java.io.BufferedWriter;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.junit.Test;
+
+import edu.uci.ics.genomix.type.KmerBytesWritable;
+import edu.uci.ics.genomix.type.VKmerBytesWritable;
+import edu.uci.ics.utils.TestUtils;
+
+@SuppressWarnings("deprecation")
+public class MergePathH2Test {
+    private static final String ACTUAL_RESULT_DIR = "actual4";
+    private static final String COMPARE_DIR = "compare";
+    private JobConf conf = new JobConf();
+    private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
+    private static final String DATA_PATH = "actual2" + "/result2" + "/part-00000";
+    private static final String HDFS_PATH = "/webmap";
+    private static final String HDFA_PATH_DATA = "/webmapdata";
+    
+    private static final String RESULT_PATH = "/result4";
+    private static final String EXPECTED_PATH = "expected/result4";
+    private static final String TEST_SOURCE_DIR = COMPARE_DIR + RESULT_PATH + "/comparesource.txt";
+    private static final int COUNT_REDUCER = 1;
+    private static final int SIZE_KMER = 3;
+
+    private MiniDFSCluster dfsCluster;
+    private MiniMRCluster mrCluster;
+    private FileSystem dfs;
+
+    @SuppressWarnings("resource")
+    @Test
+    public void test() throws Exception {
+        FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
+        FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
+        startHadoop();
+
+        MergePathH2Driver tldriver = new MergePathH2Driver();
+        tldriver.run(HDFS_PATH, RESULT_PATH, HDFA_PATH_DATA, COUNT_REDUCER, SIZE_KMER, 0, HADOOP_CONF_PATH);
+        
+/*        SequenceFile.Reader reader = null;
+        Path path = new Path(RESULT_PATH + "/part-00000");
+//        Path path = new Path(RESULT_PATH + "/uncomplete0" + "/uncomplete0-r-00000");
+        reader = new SequenceFile.Reader(dfs, path, conf);
+        VKmerBytesWritable key = (VKmerBytesWritable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
+        MergePathValueWritable value = (MergePathValueWritable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
+        File filePathTo = new File(TEST_SOURCE_DIR);
+        BufferedWriter bw = new BufferedWriter(new FileWriter(filePathTo));
+        while (reader.next(key, value)) {
+            bw.write(key.toString() + "\t" + value.getAdjBitMap() + "\t" + value.getFlag());
+            bw.newLine();
+        }
+        bw.close();*/
+        dumpResult();
+        
+//        TestUtils.compareWithResult(new File(TEST_SOURCE_DIR), new File(EXPECTED_PATH));
+
+        cleanupHadoop();
+
+    }
+    private void startHadoop() throws IOException {
+        FileSystem lfs = FileSystem.getLocal(new Configuration());
+        lfs.delete(new Path("build"), true);
+        System.setProperty("hadoop.log.dir", "logs");
+        dfsCluster = new MiniDFSCluster(conf, 2, true, null);
+        dfs = dfsCluster.getFileSystem();
+        mrCluster = new MiniMRCluster(4, dfs.getUri().toString(), 2);
+
+        Path src = new Path(DATA_PATH);
+        Path dest = new Path(HDFS_PATH + "/");
+        dfs.mkdirs(dest);
+        dfs.copyFromLocalFile(src, dest);
+        Path data = new Path(HDFA_PATH_DATA + "/");
+        dfs.mkdirs(data);
+   
+        DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
+        conf.writeXml(confOutput);
+        confOutput.flush();
+        confOutput.close();
+    }
+
+    private void cleanupHadoop() throws IOException {
+        mrCluster.shutdown();
+        dfsCluster.shutdown();
+    }
+
+    private void dumpResult() throws IOException {
+//        Path src = new Path(HDFA_PATH_DATA + "/" + "complete2");
+        Path src = new Path(RESULT_PATH);
+        Path dest = new Path(ACTUAL_RESULT_DIR + "/");
+        dfs.copyToLocalFile(src, dest);
+    }
+}
+
diff --git a/genomix/genomix-hadoop/testactual/source.txt b/genomix/genomix-hadoop/testactual/source.txt
deleted file mode 100644
index aa7a107..0000000
--- a/genomix/genomix-hadoop/testactual/source.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-ATAGAAGATCGA	A|T	1
-AATAGAAGATCG	|A	1
-TAGAAGATCGAT	A|	1