reformat

Normalize whitespace and long-line wrapping across the H3 path-merge code.
Beyond formatting, this also adds two convenience run(...) overloads to
MergePathsH3Driver and switches TestPathMergeH3 to pass the mini-cluster
JobConf directly rather than the written conf.xml.
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/genomix/hadoop/graphclean/mergepaths/h3/MergePathsH3.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/genomix/hadoop/graphclean/mergepaths/h3/MergePathsH3.java
index a25f3d7..38dde9d 100644
--- a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/genomix/hadoop/graphclean/mergepaths/h3/MergePathsH3.java
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/genomix/hadoop/graphclean/mergepaths/h3/MergePathsH3.java
@@ -105,7 +105,7 @@
             if (!finalMerge) {
                 headFlag |= (MessageFlag.IS_PSEUDOHEAD & value.getFlag());
             }
-            
+
             outFlag = (byte) (headFlag | (MessageFlag.IS_TAIL & value.getFlag()));
             if (headFlag != 0 || isNodeRandomHead(curNode.getNodeID())) {
                 // head nodes send themselves to their successor
@@ -114,7 +114,7 @@
                     headFlag |= (MessageFlag.IS_PSEUDOHEAD & value.getFlag());
                 }
                 outFlag |= MessageFlag.FROM_PREDECESSOR;
-                
+
                 outputValue.set(outFlag, curNode);
                 output.collect(outputKey, outputValue);
             } else {
@@ -186,7 +186,7 @@
                 // merge the head and tail as saved output, this merged node is now a head
                 //headNode.mergeNext(tailNode, KMER_SIZE);
                 outputValue.set(outFlag, headNode);
-                
+
                 if ((outFlag & MessageFlag.IS_TAIL) == MessageFlag.IS_TAIL) {
                     // Pseudoheads merging with tails don't become heads.
                     // Reset the IS_PSEUDOHEAD flag
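
For reference, a minimal self-contained sketch of the bit-flag pattern used in the hunks above; the constant values below are hypothetical stand-ins for the real MessageFlag fields:

public class MessageFlagDemo {
    // Hypothetical flag values; the real constants live in MessageFlag.
    static final byte IS_PSEUDOHEAD = 1 << 0;
    static final byte IS_TAIL = 1 << 1;
    static final byte FROM_PREDECESSOR = 1 << 2;

    public static void main(String[] args) {
        byte incoming = (byte) (IS_PSEUDOHEAD | IS_TAIL); // flags on a received message
        byte headFlag = 0;
        headFlag |= (IS_PSEUDOHEAD & incoming); // copy only the pseudohead bit
        byte outFlag = (byte) (headFlag | (IS_TAIL & incoming));
        outFlag |= FROM_PREDECESSOR; // mark which direction the message came from
        System.out.println((outFlag & IS_TAIL) == IS_TAIL); // prints true
    }
}

Each flag owns one bit, so masking with & isolates a single flag from an incoming byte and |= accumulates it into the outgoing one, which is the shape of the headFlag/outFlag logic above.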
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/genomix/hadoop/graphclean/mergepaths/h3/MergePathsH3Driver.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/genomix/hadoop/graphclean/mergepaths/h3/MergePathsH3Driver.java
index 7f4fe16..bd08a78 100644
--- a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/genomix/hadoop/graphclean/mergepaths/h3/MergePathsH3Driver.java
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/genomix/hadoop/graphclean/mergepaths/h3/MergePathsH3Driver.java
@@ -47,9 +47,9 @@
 
     }
 
-    public void run(String inputPath, String outputPath, int numReducers, int sizeKmer, int mergeRound, String defaultConfPath, JobConf defaultConf)
-            throws IOException {
-        JobConf baseConf = defaultConf == null ? new JobConf() : defaultConf;        
+    public void run(String inputPath, String outputPath, int numReducers, int sizeKmer, int mergeRound,
+            String defaultConfPath, JobConf defaultConf) throws IOException {
+        JobConf baseConf = defaultConf == null ? new JobConf() : defaultConf;
         if (defaultConfPath != null) {
             baseConf.addResource(new Path(defaultConfPath));
         }
@@ -81,12 +81,22 @@
         dfs.rename(new Path(tmpOutputPath), new Path(outputPath)); // save final results
     }
 
+    public void run(String inputPath, String outputPath, int numReducers, int sizeKmer, int mergeRound,
+            String defaultConfPath) throws IOException {
+        run(inputPath, outputPath, numReducers, sizeKmer, mergeRound, defaultConfPath, null);
+    }
+
+    public void run(String inputPath, String outputPath, int numReducers, int sizeKmer, int mergeRound,
+            JobConf defaultConf) throws IOException {
+        run(inputPath, outputPath, numReducers, sizeKmer, mergeRound, null, defaultConf);
+    }
+
     public static void main(String[] args) throws Exception {
         Options options = new Options();
         CmdLineParser parser = new CmdLineParser(options);
         parser.parseArgument(args);
         MergePathsH3Driver driver = new MergePathsH3Driver();
-        driver.run(options.inputPath, options.outputPath, options.numReducers, 
-                options.sizeKmer, options.mergeRound, null, null);
+        driver.run(options.inputPath, options.outputPath, options.numReducers, options.sizeKmer, options.mergeRound,
+                null, null);
     }
 }
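
A usage sketch for the two convenience overloads added above; the paths, k-mer size, and round count are hypothetical:

MergePathsH3Driver driver = new MergePathsH3Driver();

// Configure from an XML file; the JobConf argument is defaulted to null:
driver.run("/input/graph", "/output/merged", 2, 55, 10, "conf/hadoop.xml");

// Or reuse an existing JobConf; the conf-path argument is defaulted to null:
JobConf conf = new JobConf();
driver.run("/input/graph", "/output/merged", 2, 55, 10, conf);

Note that a bare null sixth argument would be ambiguous between the String and JobConf overloads, which is presumably why main still spells out the full seven-argument call with trailing (null, null).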
diff --git a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/genomix/hadoop/pmcommon/PathNodeInitial.java b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/genomix/hadoop/pmcommon/PathNodeInitial.java
index 97563de..e7bcdf6 100644
--- a/genomix/genomix-hadoop/src/main/java/edu/uci/ics/genomix/hadoop/pmcommon/PathNodeInitial.java
+++ b/genomix/genomix-hadoop/src/main/java/edu/uci/ics/genomix/hadoop/pmcommon/PathNodeInitial.java
@@ -57,9 +57,9 @@
         private int inDegree;
         private int outDegree;
         private NodeWritable emptyNode;
-        
+
         public PathNodeInitialMapper() {
-            
+
         }
 
         public void configure(JobConf conf) {
@@ -98,8 +98,7 @@
                     outputKey.set(key.getRFList().getPosition(0));
                 }
                 output.collect(outputKey, outputValue);
-            }
-            else {
+            } else {
                 // TODO: all other nodes will not participate-- should they be collected in a "complete" output?
             }
         }
diff --git a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/genomix/hadoop/graphclean/mergepaths/h3/TestPathMergeH3.java b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/genomix/hadoop/graphclean/mergepaths/h3/TestPathMergeH3.java
index 17c6b16..735c968 100644
--- a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/genomix/hadoop/graphclean/mergepaths/h3/TestPathMergeH3.java
+++ b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/genomix/hadoop/graphclean/mergepaths/h3/TestPathMergeH3.java
@@ -58,8 +58,8 @@
         copyResultsToLocal(HDFS_MARKPATHS, ACTUAL_ROOT + PATHMARKS_FILE, false, conf);
 
         MergePathsH3Driver h3 = new MergePathsH3Driver();
-//        h3.run(HDFS_GRAPHBUILD, HDFS_MERGED, 2, KMER_LENGTH, 1, HADOOP_CONF_ROOT + "conf.xml", conf);
-        h3.run(HDFS_MARKPATHS, HDFS_MERGED, 2, KMER_LENGTH, 10, HADOOP_CONF_ROOT + "conf.xml", null);
+        h3.run(HDFS_MARKPATHS, HDFS_MERGED, 2, KMER_LENGTH, 10, null, conf);
+//        h3.run(HDFS_MARKPATHS, HDFS_MERGED, 2, KMER_LENGTH, 10, HADOOP_CONF_ROOT + "conf.xml", null);
         copyResultsToLocal(HDFS_MERGED, ACTUAL_ROOT + PATHMERGE_FILE, false, conf);
     }
 
diff --git a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/genomix/hadoop/pmcommon/GenomixMiniClusterTest.java b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/genomix/hadoop/pmcommon/GenomixMiniClusterTest.java
index de7d8ce..7f1e1d7 100644
--- a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/genomix/hadoop/pmcommon/GenomixMiniClusterTest.java
+++ b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/genomix/hadoop/pmcommon/GenomixMiniClusterTest.java
@@ -37,15 +37,15 @@
 public class GenomixMiniClusterTest {
     protected int KMER_LENGTH = 5;
     protected int READ_LENGTH = 8;
-    
+
     // subclass should modify this to include the HDFS directories that should be cleaned up
     protected ArrayList<String> HDFS_PATHS = new ArrayList<String>();
 
     protected static String EXPECTED_ROOT = "src/test/resources/expected/";
     protected static String ACTUAL_ROOT = "src/test/resources/actual/";
-    
+
     protected static String HADOOP_CONF_ROOT = "src/test/resources/hadoop/conf/";
-    
+
     protected static MiniDFSCluster dfsCluster;
     protected static MiniMRCluster mrCluster;
     private static FileSystem dfs;
@@ -61,42 +61,41 @@
         HyracksUtils.init();
         FileUtils.forceMkdir(new File(ACTUAL_ROOT));
         FileUtils.cleanDirectory(new File(ACTUAL_ROOT));
-        driver = new Driver(HyracksUtils.CC_HOST,
-                HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT, numPartitionPerMachine);
+        driver = new Driver(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT, numPartitionPerMachine);
     }
-    
+
     /*
      * Merge and copy a DFS directory to a local destination, converting to text if necessary. 
      * Also locally store the binary-formatted result if available.
      */
-    protected static void copyResultsToLocal(String hdfsSrcDir, String localDestFile, boolean resultsAreText, Configuration conf) throws IOException {
+    protected static void copyResultsToLocal(String hdfsSrcDir, String localDestFile, boolean resultsAreText,
+            Configuration conf) throws IOException {
         if (resultsAreText) {
             // for text files, just concatenate them together
-            FileUtil.copyMerge(FileSystem.get(conf), new Path(hdfsSrcDir),
-                    FileSystem.getLocal(new Configuration()), new Path(localDestFile),
-                    false, conf, null);
+            FileUtil.copyMerge(FileSystem.get(conf), new Path(hdfsSrcDir), FileSystem.getLocal(new Configuration()),
+                    new Path(localDestFile), false, conf, null);
         } else {
             // file is binary
             // save the entire binary output dir
-            FileUtil.copy(FileSystem.get(conf), new Path(hdfsSrcDir),
-                    FileSystem.getLocal(new Configuration()), new Path(localDestFile + ".bindir"),
-                    false, conf);
-            
+            FileUtil.copy(FileSystem.get(conf), new Path(hdfsSrcDir), FileSystem.getLocal(new Configuration()),
+                    new Path(localDestFile + ".bindir"), false, conf);
+
             // also load the Nodes and write them out as text locally. 
             FileSystem lfs = FileSystem.getLocal(new Configuration());
             lfs.mkdirs(new Path(localDestFile).getParent());
             File filePathTo = new File(localDestFile);
             BufferedWriter bw = new BufferedWriter(new FileWriter(filePathTo));
-            
+
             FileStatus[] files = dfs.globStatus(new Path(hdfsSrcDir + "*"));
             SequenceFile.Reader reader = new SequenceFile.Reader(dfs, files[0].getPath(), conf);
-            SequenceFile.Writer writer = new SequenceFile.Writer(lfs, new JobConf(), new Path(localDestFile + ".binmerge"), reader.getKeyClass(), reader.getValueClass());
-            
+            SequenceFile.Writer writer = new SequenceFile.Writer(lfs, new JobConf(), new Path(localDestFile
+                    + ".binmerge"), reader.getKeyClass(), reader.getValueClass());
+
             Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
             Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
-            
+
             for (FileStatus f : files) {
-                if(f.getLen() == 0) {
+                if (f.getLen() == 0) {
                     continue;
                 }
                 reader = new SequenceFile.Reader(dfs, f.getPath(), conf);
@@ -108,7 +107,7 @@
                     System.out.println(key.toString() + "\t" + value.toString());
                     bw.newLine();
                     writer.append(key, value);
-                    
+
                 }
                 reader.close();
             }
@@ -117,9 +116,9 @@
         }
 
     }
-    
+
     protected static boolean checkResults(String expectedPath, String actualPath, int[] poslistField) throws Exception {
-        File dumped = new File(actualPath); 
+        File dumped = new File(actualPath);
         if (poslistField != null) {
             TestUtils.compareWithUnSortedPosition(new File(expectedPath), dumped, poslistField);
         } else {
@@ -137,7 +136,7 @@
 
     protected static void startHDFS() throws IOException {
         conf.addResource(new Path(HADOOP_CONF_ROOT + "core-site.xml"));
-//        conf.addResource(new Path(HADOOP_CONF_ROOT + "mapred-site.xml"));
+        //        conf.addResource(new Path(HADOOP_CONF_ROOT + "mapred-site.xml"));
         conf.addResource(new Path(HADOOP_CONF_ROOT + "hdfs-site.xml"));
 
         FileSystem lfs = FileSystem.getLocal(new Configuration());
@@ -148,17 +147,18 @@
         mrCluster = new MiniMRCluster(4, dfs.getUri().toString(), 2);
         System.out.println(dfs.getUri().toString());
 
-        DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_ROOT + "conf.xml")));
+        DataOutputStream confOutput = new DataOutputStream(
+                new FileOutputStream(new File(HADOOP_CONF_ROOT + "conf.xml")));
         conf.writeXml(confOutput);
         confOutput.close();
     }
-    
+
     protected static void copyLocalToDFS(String localSrc, String hdfsDest) throws IOException {
         Path dest = new Path(hdfsDest);
         dfs.mkdirs(dest);
         dfs.copyFromLocalFile(new Path(localSrc), dest);
     }
-    
+
     /*
      * Remove the local "actual" folder and any hdfs folders in use by this test
      */
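
Finally, a self-contained sketch of the SequenceFile-to-text dump that copyResultsToLocal performs above; the input glob and local output name here are hypothetical, and only stock org.apache.hadoop APIs are used:

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;

public class SeqFileDump {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        BufferedWriter bw = new BufferedWriter(new FileWriter("dump.txt"));
        // Glob every part file under the (hypothetical) job output dir.
        FileStatus[] files = fs.globStatus(new Path("/job/output/part-*"));
        for (FileStatus f : files) {
            if (f.getLen() == 0) {
                continue; // skip empty part files, as the helper above does
            }
            SequenceFile.Reader reader = new SequenceFile.Reader(fs, f.getPath(), conf);
            Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
            Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
            while (reader.next(key, value)) {
                bw.write(key.toString() + "\t" + value.toString());
                bw.newLine();
            }
            reader.close();
        }
        bw.close();
    }
}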