Update: remove genomix-pregelix build files, test artifacts, and example vertex sources

git-svn-id: https://hyracks.googlecode.com/svn/branches/fullstack_genomix@2922 123451ca-8445-de46-9d55-352943316053
diff --git a/genomix/genomix-pregelix/actual/LoadGraph.result b/genomix/genomix-pregelix/actual/LoadGraph.result
deleted file mode 100644
index b96a242..0000000
--- a/genomix/genomix-pregelix/actual/LoadGraph.result
+++ /dev/null
@@ -1,4 +0,0 @@
-06|Vertex(id=06,value=34, edges=())
-07|Vertex(id=07,value=68, edges=())
-1b|Vertex(id=1b,value=-120, edges=())
-2d|Vertex(id=2d,value=-34, edges=())
diff --git a/genomix/genomix-pregelix/actual/conf.xml b/genomix/genomix-pregelix/actual/conf.xml
deleted file mode 100644
index 0955f46..0000000
--- a/genomix/genomix-pregelix/actual/conf.xml
+++ /dev/null
@@ -1,180 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
-<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
-<property><name>mapred.task.cache.levels</name><value>2</value></property>
-<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop</value></property>
-<property><name>hadoop.native.lib</name><value>true</value></property>
-<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
-<property><name>dfs.namenode.decommission.nodes.per.interval</name><value>5</value></property>
-<property><name>dfs.https.need.client.auth</name><value>false</value></property>
-<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
-<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
-<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
-<property><name>dfs.namenode.logging.level</name><value>info</value></property>
-<property><name>dfs.datanode.address</name><value>127.0.0.1:0</value></property>
-<property><name>io.skip.checksum.errors</name><value>false</value></property>
-<property><name>fs.default.name</name><value>hdfs://localhost:54722</value></property>
-<property><name>mapred.child.tmp</name><value>./tmp</value></property>
-<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
-<property><name>dfs.safemode.threshold.pct</name><value>0.999f</value></property>
-<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
-<property><name>dfs.namenode.handler.count</name><value>10</value></property>
-<property><name>dfs.blockreport.initialDelay</name><value>0</value></property>
-<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
-<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
-<property><name>io.sort.factor</name><value>10</value></property>
-<property><name>mapred.task.timeout</name><value>600000</value></property>
-<property><name>mapred.max.tracker.failures</name><value>4</value></property>
-<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
-<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
-<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
-<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
-<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
-<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
-<property><name>dfs.safemode.extension</name><value>0</value></property>
-<property><name>tasktracker.http.threads</name><value>40</value></property>
-<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
-<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
-<property><name>mapred.output.compress</name><value>false</value></property>
-<property><name>io.bytes.per.checksum</name><value>512</value></property>
-<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.StaticMapping</value></property>
-<property><name>dfs.https.server.keystore.resource</name><value>ssl-server.xml</value></property>
-<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
-<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
-<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
-<property><name>dfs.name.edits.dir</name><value>${dfs.name.dir}</value></property>
-<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
-<property><name>hadoop.job.ugi</name><value>anbangx,anbangx,adm,cdrom,sudo,dip,plugdev,lpadmin,sambashare</value></property>
-<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
-<property><name>dfs.block.size</name><value>65536</value></property>
-<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
-<property><name>job.end.retry.attempts</name><value>0</value></property>
-<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
-<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
-<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
-<property><name>dfs.datanode.ipc.address</name><value>127.0.0.1:0</value></property>
-<property><name>dfs.permissions</name><value>true</value></property>
-<property><name>topology.script.number.args</name><value>100</value></property>
-<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
-<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
-<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
-<property><name>dfs.datanode.https.address</name><value>0.0.0.0:50475</value></property>
-<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
-<property><name>dfs.secondary.http.address</name><value>0.0.0.0:50090</value></property>
-<property><name>dfs.replication.max</name><value>512</value></property>
-<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
-<property><name>hadoop.security.authorization</name><value>false</value></property>
-<property><name>local.cache.size</name><value>10737418240</value></property>
-<property><name>mapred.min.split.size</name><value>0</value></property>
-<property><name>mapred.map.tasks</name><value>2</value></property>
-<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
-<property><name>dfs.https.client.keystore.resource</name><value>ssl-client.xml</value></property>
-<property><name>dfs.namenode.startup</name><value>REGULAR</value></property>
-<property><name>mapred.job.queue.name</name><value>default</value></property>
-<property><name>dfs.https.address</name><value>0.0.0.0:50470</value></property>
-<property><name>dfs.balance.bandwidthPerSec</name><value>1048576</value></property>
-<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
-<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
-<property><name>job.end.retry.interval</name><value>30000</value></property>
-<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
-<property><name>fs.checkpoint.dir</name><value>build/test/data/dfs/namesecondary1,build/test/data/dfs/namesecondary2</value></property>
-<property><name>mapred.reduce.tasks</name><value>1</value></property>
-<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
-<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
-<property><name>dfs.max.objects</name><value>0</value></property>
-<property><name>webinterface.private.actions</name><value>false</value></property>
-<property><name>io.sort.spill.percent</name><value>0.80</value></property>
-<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
-<property><name>dfs.datanode.dns.nameserver</name><value>default</value></property>
-<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
-<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
-<property><name>dfs.blockreport.intervalMsec</name><value>3600000</value></property>
-<property><name>mapred.map.max.attempts</name><value>4</value></property>
-<property><name>dfs.client.block.write.retries</name><value>3</value></property>
-<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
-<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
-<property><name>dfs.https.enable</name><value>false</value></property>
-<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
-<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
-<property><name>keep.failed.task.files</name><value>false</value></property>
-<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
-<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
-<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
-<property><name>io.map.index.skip</name><value>0</value></property>
-<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
-<property><name>dfs.default.chunk.view.size</name><value>32768</value></property>
-<property><name>hadoop.logfile.size</name><value>10000000</value></property>
-<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
-<property><name>dfs.datanode.du.reserved</name><value>0</value></property>
-<property><name>fs.checkpoint.period</name><value>3600</value></property>
-<property><name>dfs.web.ugi</name><value>webuser,webgroup</value></property>
-<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
-<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
-<property><name>dfs.df.interval</name><value>60000</value></property>
-<property><name>dfs.data.dir</name><value>${hadoop.tmp.dir}/dfs/data</value></property>
-<property><name>fs.s3.maxRetries</name><value>4</value></property>
-<property><name>dfs.datanode.dns.interface</name><value>default</value></property>
-<property><name>dfs.support.append</name><value>false</value></property>
-<property><name>dfs.permissions.supergroup</name><value>supergroup</value></property>
-<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
-<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
-<property><name>fs.trash.interval</name><value>0</value></property>
-<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
-<property><name>dfs.replication.min</name><value>1</value></property>
-<property><name>mapred.submit.replication</name><value>10</value></property>
-<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
-<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
-<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
-<property><name>dfs.namenode.decommission.interval</name><value>3</value></property>
-<property><name>dfs.http.address</name><value>localhost:38774</value></property>
-<property><name>dfs.heartbeat.interval</name><value>3</value></property>
-<property><name>mapred.job.tracker</name><value>localhost:29007</value></property>
-<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
-<property><name>dfs.name.dir</name><value>build/test/data/dfs/name1,build/test/data/dfs/name2</value></property>
-<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
-<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
-<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
-<property><name>dfs.datanode.http.address</name><value>127.0.0.1:0</value></property>
-<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
-<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
-<property><name>dfs.replication.interval</name><value>3</value></property>
-<property><name>io.sort.record.percent</name><value>0.05</value></property>
-<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
-<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
-<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>20</value></property>
-<property><name>dfs.replication</name><value>1</value></property>
-<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
-<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
-<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
-<property><name>hadoop.logfile.count</name><value>10</value></property>
-<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
-<property><name>mapred.max.split.size</name><value>128</value></property>
-<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
-<property><name>fs.s3.block.size</name><value>67108864</value></property>
-<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
-<property><name>mapred.acls.enabled</name><value>false</value></property>
-<property><name>mapred.queue.names</name><value>default</value></property>
-<property><name>dfs.access.time.precision</name><value>3600000</value></property>
-<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
-<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
-<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
-<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
-<property><name>io.sort.mb</name><value>100</value></property>
-<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
-<property><name>mapred.compress.map.output</name><value>false</value></property>
-<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
-<property><name>ipc.client.kill.max</name><value>10</value></property>
-<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
-<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
-<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
-<property><name>io.file.buffer.size</name><value>4096</value></property>
-<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
-<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
-<property><name>dfs.datanode.handler.count</name><value>3</value></property>
-<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
-<property><name>mapred.task.profile</name><value>false</value></property>
-<property><name>dfs.replication.considerLoad</name><value>true</value></property>
-<property><name>jobclient.output.filter</name><value>FAILED</value></property>
-<property><name>mapred.tasktracker.map.tasks.maximum</name><value>20</value></property>
-<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
-<property><name>fs.checkpoint.size</name><value>67108864</value></property>
-</configuration>
\ No newline at end of file
diff --git a/genomix/genomix-pregelix/pom.xml b/genomix/genomix-pregelix/pom.xml
deleted file mode 100644
index 28f5e1c..0000000
--- a/genomix/genomix-pregelix/pom.xml
+++ /dev/null
@@ -1,164 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.pregelix</groupId>
-  <artifactId>genomix-pregelix</artifactId>
-  <version>0.2.2</version>
-  <packaging>jar</packaging>
-  <name>genomix-pregelix</name>
-
-  <properties>
-    <jvm.extraargs/>
-  </properties>
-
-  <profiles>
-    <profile>
-      <id>macosx</id>
-      <activation>
-        <os>
-          <name>mac os x</name>
-        </os>
-        <jdk>1.7</jdk>
-      </activation>
-      <properties>
-        <jvm.extraargs>-Djava.nio.channels.spi.SelectorProvider=sun.nio.ch.KQueueSelectorProvider</jvm.extraargs>
-      </properties>
-    </profile>
-  </profiles>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <configuration>
-          <descriptorRefs>
-            <descriptorRef>jar-with-dependencies</descriptorRef>
-          </descriptorRefs>
-        </configuration>
-        <executions>
-          <execution>
-            <id>make-my-jar-with-dependencies</id>
-            <phase>package</phase>
-            <goals>
-              <goal>single</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.7.2</version>
-        <configuration>
-          <forkMode>pertest</forkMode>
-          <argLine>-enableassertions -Xmx2047m -Dfile.encoding=UTF-8
-            -Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
-          <includes>
-            <include>**/*TestSuite.java</include>
-            <include>**/*Test.java</include>
-          </includes>
-        </configuration>
-      </plugin>
-      <plugin>
-        <artifactId>maven-clean-plugin</artifactId>
-        <configuration>
-          <filesets>
-            <fileset>
-              <directory>.</directory>
-              <includes>
-                <include>teststore*</include>
-                <include>edu*</include>
-                <include>actual*</include>
-                <include>build*</include>
-                <include>expect*</include>
-                <include>ClusterController*</include>
-                <include>edu.uci.*</include>
-              </includes>
-            </fileset>
-          </filesets>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-
-  <dependencies>
-    <dependency>
-      <groupId>edu.uci.ics.hyracks</groupId>
-      <artifactId>pregelix-core</artifactId>
-      <version>0.2.2</version>
-      <type>jar</type>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>edu.uci.ics.hyracks</groupId>
-      <artifactId>pregelix-example</artifactId>
-      <version>0.2.2</version>
-      <type>jar</type>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>4.8.1</version>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-
-  <scm>
-    <connection>scm:svn:https://hyracks.googlecode.com/svn/trunk/fullstack/pregelix</connection>
-    <developerConnection>scm:svn:https://hyracks.googlecode.com/svn/trunk/fullstack/pregelix</developerConnection>
-    <url>http://code.google.com/p/hyracks/source/browse/#svn/trunk/fullstack/pregelix</url>
-  </scm>
-
-  <distributionManagement>
-    <repository>
-      <id>hyracks-releases</id>
-      <url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-releases/</url>
-    </repository>
-    <snapshotRepository>
-      <id>hyracks-snapshots</id>
-      <url>http://obelix.ics.uci.edu/nexus/content/repositories/hyracks-snapshots/</url>
-    </snapshotRepository>
-  </distributionManagement>
-
-  <reporting>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-changelog-plugin</artifactId>
-      </plugin>
-    </plugins>
-  </reporting>
-
-  <repositories>
-    <repository>
-      <id>hyracks-public</id>
-      <url>http://obelix.ics.uci.edu/nexus/content/groups/hyracks-public/</url>
-    </repository>
-    <repository>
-      <id>jboss-public</id>
-      <url>https://repository.jboss.org/nexus/content/groups/public/</url>
-    </repository>
-  </repositories>
-
-  <pluginRepositories>
-    <pluginRepository>
-      <id>hyracks-public</id>
-      <url>http://obelix.ics.uci.edu/nexus/content/groups/hyracks-public/</url>
-      <releases>
-        <updatePolicy>always</updatePolicy>
-      </releases>
-    </pluginRepository>
-  </pluginRepositories>
-</project>
-
-
diff --git a/genomix/genomix-pregelix/src/main/java/edu/uci/ics/pregelix/LoadGraphVertex.java b/genomix/genomix-pregelix/src/main/java/edu/uci/ics/pregelix/LoadGraphVertex.java
deleted file mode 100644
index 4af68fb..0000000
--- a/genomix/genomix-pregelix/src/main/java/edu/uci/ics/pregelix/LoadGraphVertex.java
+++ /dev/null
@@ -1,109 +0,0 @@
-package edu.uci.ics.pregelix;
-
-import java.io.IOException;
-import java.util.Iterator;
-
-import org.apache.hadoop.io.ByteWritable;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.RecordWriter;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-
-import edu.uci.ics.pregelix.api.graph.Vertex;
-import edu.uci.ics.pregelix.api.io.VertexWriter;
-import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat;
-import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat.TextVertexWriter;
-import edu.uci.ics.pregelix.api.job.PregelixJob;
-import edu.uci.ics.pregelix.example.client.Client;
-import edu.uci.ics.pregelix.example.io.MessageWritable;
-
-/*
- * vertexId: BytesWritable
- * vertexValue: ByteWritable
- * edgeValue: NullWritable
- * message: MessageWritable
- * 
- * DNA:
- * A: 00
- * C: 01
- * G: 10
- * T: 11
- * 
- * successor node
- *  A 00000001 1
- *  G 00000010 2
- *  C 00000100 4
- *  T 00001000 8
- * predecessor node
- *  A 00010000 16
- *  G 00100000 32
- *  C 01000000 64
- *  T 10000000 128
- *
- * For example, ONE LINE in the input file: 00,01,10	0001,0010,
- * means that the vertexId is ACG, its successor node is A (0001 in the
- * successor bits), and its predecessor node is G (0010 in the predecessor
- * bits). Both are packed into vertexValue; edgeValue is unused. A decoding
- * sketch appears below compute().
- * Message details are in edu.uci.ics.pregelix.example.io.MessageWritable.
- */
-public class LoadGraphVertex extends Vertex<BytesWritable, ByteWritable, NullWritable, MessageWritable>{
-	
-	private ByteWritable tmpVertexValue = new ByteWritable();
-	
-	/**
-	 * For testing only: in superstep 1, shift each vertexValue one bit to the
-	 * left. This placeholder logic will be replaced in a later step.
-	 */
-	@Override
-	public void compute(Iterator<MessageWritable> msgIterator) {
-		if(getSuperstep() == 1){
-			tmpVertexValue.set(getVertexValue().get());
-			tmpVertexValue.set((byte) (tmpVertexValue.get() << 1));
-			setVertexValue(tmpVertexValue);
-		}
-		else
-			voteToHalt();
-	 }
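-
-	/*
-	 * Editor's sketch (not part of the original class): decode a vertexValue
-	 * byte into its successor and predecessor bases, under the bit layout
-	 * documented above and assuming the base order A, G, C, T from the table.
-	 */
-	public static String decodeVertexValue(byte value) {
-		final char[] bases = { 'A', 'G', 'C', 'T' };
-		StringBuilder sb = new StringBuilder("succ:");
-		for (int i = 0; i < 4; i++)
-			if ((value & (1 << i)) != 0)
-				sb.append(bases[i]);
-		sb.append(" pred:");
-		for (int i = 0; i < 4; i++)
-			if ((value & (1 << (i + 4))) != 0)
-				sb.append(bases[i]);
-		return sb.toString();
-	}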
-	
-    /**
-     * Simple VertexWriter that supports {@link LoadGraphVertex}
-     */
-    public static class SimpleLoadGraphVertexWriter extends
-            TextVertexWriter<BytesWritable, ByteWritable, NullWritable> {
-        public SimpleLoadGraphVertexWriter(RecordWriter<Text, Text> lineRecordWriter) {
-            super(lineRecordWriter);
-        }
-
-        @Override
-        public void writeVertex(Vertex<BytesWritable, ByteWritable, NullWritable, ?> vertex) throws IOException,
-                InterruptedException {
-            getRecordWriter().write(new Text(vertex.getVertexId().toString()),
-                    new Text(vertex.getVertexValue().toString()));
-        }
-    }
-
-    /**
-     * Simple VertexOutputFormat that supports {@link LoadGraphVertex}
-     */
-    public static class SimpleLoadGraphVertexOutputFormat extends
-            TextVertexOutputFormat<BytesWritable, ByteWritable, NullWritable> {
-
-        @Override
-        public VertexWriter<BytesWritable, ByteWritable, NullWritable> createVertexWriter(TaskAttemptContext context)
-                throws IOException, InterruptedException {
-            RecordWriter<Text, Text> recordWriter = textOutputFormat.getRecordWriter(context);
-            return new SimpleLoadGraphVertexWriter(recordWriter);
-        }
-    }
-	
-	/**
-	 * @param args
-	 */
-	public static void main(String[] args) throws Exception {
-        PregelixJob job = new PregelixJob(LoadGraphVertex.class.getSimpleName());
-        job.setVertexClass(LoadGraphVertex.class);
-        job.setVertexInputFormatClass(TextLoadGraphInputFormat.class);
-        job.setVertexOutputFormatClass(SimpleLoadGraphVertexOutputFormat.class);
-        Client.run(args, job);
-	}
-}
diff --git a/genomix/genomix-pregelix/src/main/java/edu/uci/ics/pregelix/TestLoadGraphVertex.java b/genomix/genomix-pregelix/src/main/java/edu/uci/ics/pregelix/TestLoadGraphVertex.java
deleted file mode 100644
index 529d429..0000000
--- a/genomix/genomix-pregelix/src/main/java/edu/uci/ics/pregelix/TestLoadGraphVertex.java
+++ /dev/null
@@ -1,102 +0,0 @@
-package edu.uci.ics.pregelix;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.util.StringTokenizer;
-
-import edu.uci.ics.pregelix.LoadGraphVertex.SimpleLoadGraphVertexOutputFormat;
-import edu.uci.ics.pregelix.api.job.PregelixJob;
-import edu.uci.ics.pregelix.example.client.Client;
-
-public class TestLoadGraphVertex {
-
-	/**
-	 * When running on different machines, these parameters must be changed.
-	 * This test is not complete yet.
-	 */
-	private static final String EXPECT_RESULT_FILE = "~/workspace/genomix-pregelix/expect/expected_result";
-	private static final String INPUT_PATHS = "~/workspace/genomix-pregelix/folder";
-	private static final String OUTPUT_PATH = "~/workspace/genomix-pregelix/tmp/pg_result"; //result
-	private static final String IP = "169.234.134.212"; 
-	private static final String PORT = "3099";
-	/**
-	 * @param args
-	 * @throws Exception 
-	 */
-	@SuppressWarnings("deprecation")
-	public static void main(String[] args) throws Exception {
-		// initialize the client arguments
-		args = new String[8];
-		args[0] = "-inputpaths"; 
-		args[1] = INPUT_PATHS;
-		args[2] = "-outputpath";
-		args[3] = OUTPUT_PATH;
-		args[4] = "-ip";
-		args[5] = IP;
-		args[6] = "-port";
-		args[7] = PORT;
-        PregelixJob job = new PregelixJob(LoadGraphVertex.class.getSimpleName());
-        job.setVertexClass(LoadGraphVertex.class);
-        job.setVertexInputFormatClass(TextLoadGraphInputFormat.class);
-        job.setVertexOutputFormatClass(SimpleLoadGraphVertexOutputFormat.class);
-        Client.run(args, job);
-        
-        generateExpectBinaryFile();
-        
-        //test if the actual file is the same as the expected file
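-        // NOTE (editor's assumption): FileInputStream does not expand "/*"
-        // globs; a working run would need to list the part files under
-        // OUTPUT_PATH and open each one individually.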
-        DataInputStream actual_dis = new DataInputStream(new FileInputStream(OUTPUT_PATH + "/*"));
-        DataInputStream expected_dis = new DataInputStream(new FileInputStream(EXPECT_RESULT_FILE));
-        String actualLine, expectedLine = null;
-        StringTokenizer actualSt, expectedSt;
-		byte[] actualVertexId, expectedVertexId = null;
-		byte actualVertexValue, expectedVertexValue;
-        byte[] tmp = null;
-        while(((actualLine = actual_dis.readLine()) != null) && 
-        		((expectedLine = expected_dis.readLine()) != null)){
-        	actualSt = new StringTokenizer(actualLine, " ");
-			actualVertexId = actualSt.nextToken().getBytes();
-			tmp = actualSt.nextToken().getBytes();
-			actualVertexValue = tmp[0];
-			
-			expectedSt = new StringTokenizer(expectedLine," ");
-			expectedVertexId = expectedSt.nextToken().getBytes();
-			tmp = expectedSt.nextToken().getBytes();
-			expectedVertexValue = tmp[0];
-			
-			//assertEquals("actualVextexId == expectedVertexId", actualVertexId, expectedVertexId);
-			//assertEquals("actualVertexValue == expectedVertexValue", actualVertexValue, expectedVertexValue);
-        }
-        
-        //assertEquals("actualLine should be the end and be equal to null", actualLine, null);
-        //assertEquals("expectedLine should be the end and be equal to null", expectedLine, null);
-	}
-
-	@SuppressWarnings("deprecation")
-	public static void generateExpectBinaryFile() throws Exception{
-		DataInputStream dis = new DataInputStream(new FileInputStream(INPUT_PATHS + "/*"));
-		DataOutputStream dos = new DataOutputStream(new FileOutputStream(EXPECT_RESULT_FILE));
-		String line;
-		byte[] vertexId = null;
-		byte vertexValue;
-		byte[] tmp = null;
-		while((line = dis.readLine()) != null){
-			StringTokenizer st = new StringTokenizer(line, " ");
-			vertexId = st.nextToken().getBytes();
-			tmp = st.nextToken().getBytes();
-			vertexValue = tmp[0];		
-			
-			vertexValue = (byte) (vertexValue << 1); 
-			for(int i = 0; i < vertexId.length; i++)
-				dos.writeByte(vertexId[i]);
-			dos.writeByte((byte)32); //space
-			dos.writeByte(vertexValue);
-			dos.writeByte((byte)10); //line feed
-		}
-		
-		dis.close();
-		dos.close();
-	}
-}
diff --git a/genomix/genomix-pregelix/src/main/java/edu/uci/ics/pregelix/TextLoadGraphInputFormat.java b/genomix/genomix-pregelix/src/main/java/edu/uci/ics/pregelix/TextLoadGraphInputFormat.java
deleted file mode 100644
index b7fda73..0000000
--- a/genomix/genomix-pregelix/src/main/java/edu/uci/ics/pregelix/TextLoadGraphInputFormat.java
+++ /dev/null
@@ -1,82 +0,0 @@
-package edu.uci.ics.pregelix;
-
-import java.io.IOException;
-
-import org.apache.hadoop.io.ByteWritable;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.RecordReader;
-import org.apache.hadoop.mapreduce.TaskAttemptContext;
-
-import edu.uci.ics.pregelix.api.graph.Vertex;
-import edu.uci.ics.pregelix.api.io.VertexReader;
-import edu.uci.ics.pregelix.api.io.text.TextVertexInputFormat;
-import edu.uci.ics.pregelix.api.util.BspUtils;
-import edu.uci.ics.pregelix.example.io.MessageWritable;
-
-public class TextLoadGraphInputFormat extends
-		TextVertexInputFormat<BytesWritable, ByteWritable, NullWritable, MessageWritable>{
-	
-	/**
-	 * Creates a reader that parses one vertex per line of text input.
-	 */
-    @Override
-    public VertexReader<BytesWritable, ByteWritable, NullWritable, MessageWritable> createVertexReader(
-            InputSplit split, TaskAttemptContext context) throws IOException {
-        return new TextLoadGraphReader(textInputFormat.createRecordReader(split, context));
-    }
-    
-    @SuppressWarnings("rawtypes")
-    class TextLoadGraphReader extends
-            TextVertexReader<BytesWritable, ByteWritable, NullWritable, MessageWritable> {
-        private final static String separator = " ";
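-        // Editor's note: each input line is split on this separator, so a
-        // line is expected as "<vertexId> <value>"; only the first byte of
-        // the value token is kept (see getCurrentVertex below).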
-        private Vertex vertex;
-        private BytesWritable vertexId = new BytesWritable();
-        private ByteWritable vertexValue = new ByteWritable();
-
-        public TextLoadGraphReader(RecordReader<LongWritable, Text> lineRecordReader) {
-            super(lineRecordReader);
-        }
-
-        @Override
-        public boolean nextVertex() throws IOException, InterruptedException {
-            return getRecordReader().nextKeyValue();
-        }
-
-        @SuppressWarnings("unchecked")
-        @Override
-        public Vertex<BytesWritable, ByteWritable, NullWritable, MessageWritable> getCurrentVertex() throws IOException,
-                InterruptedException {
-            if (vertex == null)
-                vertex = (Vertex) BspUtils.createVertex(getContext().getConfiguration());
-
-            vertex.getMsgList().clear();
-            vertex.getEdges().clear();
-            Text line = getRecordReader().getCurrentValue();
-            String[] fields = line.toString().split(separator);
-
-            // both the id field and the value field are required
-            if (fields.length >= 2) {
-                /**
-                 * set the src vertex id
-                 */
-            	BytesWritable src = new BytesWritable(fields[0].getBytes());
-                vertexId.set(src);
-                vertex.setVertexId(vertexId);
-
-                
-                /**
-                 * set the vertex value
-                 */
-                byte[] temp = fields[1].getBytes();
-                vertexValue.set(temp[0]);
-                vertex.setVertexValue(vertexValue);
-                
-            }
-            return vertex;
-        }
-    }
-
-}
diff --git a/genomix/genomix-pregelix/src/main/java/edu/uci/ics/pregelix/example/io/MessageWritable.java b/genomix/genomix-pregelix/src/main/java/edu/uci/ics/pregelix/example/io/MessageWritable.java
deleted file mode 100644
index 0dbd800..0000000
--- a/genomix/genomix-pregelix/src/main/java/edu/uci/ics/pregelix/example/io/MessageWritable.java
+++ /dev/null
@@ -1,89 +0,0 @@
-package edu.uci.ics.pregelix.example.io;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.File;
-import java.io.IOException;
-import java.util.Arrays;
-
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.WritableComparator;
-
-public class MessageWritable implements WritableComparable<MessageWritable>{
-	/**
-	 * bytes stores the chains of connected DNA
-	 * file points to the file that stores the chains of connected DNA
-	 */
-	private byte[] bytes;
-	private File file;
-	
-	public MessageWritable(){		
-	}
-	
-	public MessageWritable(byte[] bytes, File file){
-		set(bytes,file);
-	}
-	
-	public void set(byte[] bytes, File file){
-		this.bytes = bytes;
-		this.file = file;
-	}
-			
-	public byte[] getBytes() {
-	    return bytes;
-	}
-	
-	public File getFile(){
-		return file;
-	}
-
-	@Override
-	public void write(DataOutput out) throws IOException {
-		// write the chain length first so that readFields can size the array
-		out.writeInt(bytes.length);
-		out.write(bytes);
-		out.writeUTF(file.getAbsolutePath());
-	}
-
-	@Override
-	public void readFields(DataInput in) throws IOException {
-		// allocate the array from the serialized length, then fill it
-		bytes = new byte[in.readInt()];
-		in.readFully(bytes);
-		String absolutePath = in.readUTF();
-		file = new File(absolutePath);
-	}
-
-    @Override
-    public int hashCode() {
-    	// combine every byte; the original assignment kept only the last byte
-    	int hashCode = 0;
-    	for(int i = 0; i < bytes.length; i++)
-    		hashCode = 31 * hashCode + bytes[i];
-        return hashCode;
-    }
-    @Override
-    public boolean equals(Object o) {
-        if (o instanceof MessageWritable) {
-        	MessageWritable tp = (MessageWritable) o;
-            // compare contents, not references
-            return Arrays.equals(bytes, tp.bytes) && file.equals(tp.file);
-        }
-        return false;
-    }
-    @Override
-    public String toString() {
-        // render the chain as text; byte[].toString() would print an identity hash
-        return new String(bytes) + "\t" + file.getAbsolutePath();
-    }
-    
-	@Override
-	public int compareTo(MessageWritable tp) {
-        // order by chain contents first, then by file path; the original
-        // compared references, which breaks the compareTo contract
-        int cmp = WritableComparator.compareBytes(bytes, 0, bytes.length,
-                tp.bytes, 0, tp.bytes.length);
-        if (cmp != 0)
-            return cmp;
-        return file.getAbsolutePath().compareTo(tp.file.getAbsolutePath());
-	}
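-
-	/*
-	 * Round-trip sketch (editor's illustration, not original code), relying
-	 * on the length-prefixed serialization above:
-	 *
-	 *   MessageWritable m = new MessageWritable("ACGT".getBytes(), new File("/tmp/chain"));
-	 *   ByteArrayOutputStream bos = new ByteArrayOutputStream();
-	 *   m.write(new DataOutputStream(bos));
-	 *   MessageWritable copy = new MessageWritable();
-	 *   copy.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
-	 *   assert m.equals(copy);
-	 */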
-
-}
diff --git a/genomix/genomix-pregelix/src/test/java/edu/uci/ics/pregelix/LoadGraphVertexTest.java b/genomix/genomix-pregelix/src/test/java/edu/uci/ics/pregelix/LoadGraphVertexTest.java
deleted file mode 100644
index e88098d..0000000
--- a/genomix/genomix-pregelix/src/test/java/edu/uci/ics/pregelix/LoadGraphVertexTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-package edu.uci.ics.pregelix;
-
-import static org.junit.Assert.*;
-
-import java.io.BufferedReader;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.IOException;
-import java.util.StringTokenizer;
-
-import org.junit.Test;
-
-import edu.uci.ics.pregelix.LoadGraphVertex.SimpleLoadGraphVertexOutputFormat;
-import edu.uci.ics.pregelix.api.job.PregelixJob;
-import edu.uci.ics.pregelix.example.client.Client;
-
-public class LoadGraphVertexTest {
-
-	/**
-	 * I cannot debug inside the JUnit test, so I have not located the error yet.
-	 * The test body is left commented out until it is resolved.
-	 */
-	private static final String EXPECT_RESULT_FILE = "expected_result";
-	private static final String INPUT_PATHS = "folder";
-	private static final String OUTPUT_PATH = "result";
-	private static final String IP = "169.234.134.212";
-	private static final String PORT = "3099";
-	
-	@SuppressWarnings("deprecation")
-	@Test
-	public void test() throws Exception {
-		// initialize args
-	/*	String[] args = new String[8];
-		args[0] = "-inputpaths"; 
-		args[1] = INPUT_PATHS;
-		args[2] = "-outputpath";
-		args[3] = OUTPUT_PATH;
-		args[4] = "-ip";
-		args[5] = IP;
-		args[6] = "-port";
-		args[7] = PORT;
-        PregelixJob job = new PregelixJob(LoadGraphVertex.class.getSimpleName());
-        job.setVertexClass(LoadGraphVertex.class);
-        job.setVertexInputFormatClass(TextLoadGraphInputFormat.class);
-        job.setVertexOutputFormatClass(SimpleLoadGraphVertexOutputFormat.class);
-        Client.run(args, job);
-        
-        generateExpectBinaryFile();
-        
-        //test if the actual file is the same as the expected file
-        DataInputStream actual_dis = new DataInputStream(new FileInputStream(OUTPUT_PATH + "/*"));
-        DataInputStream expected_dis = new DataInputStream(new FileInputStream(EXPECT_RESULT_FILE));
-        String actualLine, expectedLine = null;
-        StringTokenizer actualSt, expectedSt;
-		byte[] actualVertexId, expectedVertexId = null;
-		byte actualVertexValue, expectedVertexValue;
-        byte[] tmp = null;
-        while(((actualLine = actual_dis.readLine()) != null) && 
-        		((expectedLine = expected_dis.readLine()) != null)){
-        	actualSt = new StringTokenizer(actualLine, " ");
-			actualVertexId = actualSt.nextToken().getBytes();
-			tmp = actualSt.nextToken().getBytes();
-			actualVertexValue = tmp[0];
-			
-			expectedSt = new StringTokenizer(expectedLine," ");
-			expectedVertexId = expectedSt.nextToken().getBytes();
-			tmp = expectedSt.nextToken().getBytes();
-			expectedVertexValue = tmp[0];
-			
-			assertEquals("actualVextexId == expectedVertexId", actualVertexId, expectedVertexId);
-			assertEquals("actualVertexValue == expectedVertexValue", actualVertexValue, expectedVertexValue);
-        }
-        
-        assertEquals("actualLine should be the end and be equal to null", actualLine, null);
-        assertEquals("expectedLine should be the end and be equal to null", expectedLine, null);*/
-	}
-	
-	@SuppressWarnings("deprecation")
-	public void generateExpectBinaryFile() throws Exception{
-		DataInputStream dis = new DataInputStream(new FileInputStream(INPUT_PATHS + "/*"));
-		DataOutputStream dos = new DataOutputStream(new FileOutputStream(EXPECT_RESULT_FILE));
-		String line;
-		byte[] vertexId = null;
-		byte vertexValue;
-		byte[] tmp = null;
-		while((line = dis.readLine()) != null){
-			StringTokenizer st = new StringTokenizer(line, " ");
-			vertexId = st.nextToken().getBytes();
-			tmp = st.nextToken().getBytes();
-			vertexValue = tmp[0];		
-			
-			vertexValue = (byte) (vertexValue << 1); 
-			for(int i = 0; i < vertexId.length; i++)
-				dos.writeByte(vertexId[i]);
-			dos.writeByte((byte)32); //space
-			dos.writeByte(vertexValue);
-			dos.writeByte((byte)10); //line feed
-		}
-		
-		dis.close();
-		dos.close();
-	}
-
-}