Change the test name
diff --git a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/gbresultschecking/ResultsCheckingTest.java b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/gbresultschecking/ResultsCheckingTest.java
deleted file mode 100644
index 72e9b45..0000000
--- a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/gbresultschecking/ResultsCheckingTest.java
+++ /dev/null
@@ -1,76 +0,0 @@
-package edu.uci.ics.gbresultschecking;
-
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MiniMRCluster;
-import org.junit.Test;
-
-@SuppressWarnings("deprecation")
-public class ResultsCheckingTest {
- private static final String ACTUAL_RESULT_DIR = "actual4";
- private JobConf conf = new JobConf();
- private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
- private static final String DATA_PATH1 = "ResultsCheckingData" + "/part-00000";
- private static final String DATA_PATH2 = "ResultsCheckingData" + "/part-00001";
- private static final String HDFS_PATH1 = "/webmap1";
- private static final String HDFS_PATH2 = "/webmap2";
- private static final String RESULT_PATH = "/result4";
- private static final int COUNT_REDUCER = 4;
- private static final int SIZE_KMER = 3;
- private MiniDFSCluster dfsCluster;
- private MiniMRCluster mrCluster;
- private FileSystem dfs;
-
- @Test
- public void test() throws Exception {
- FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
- FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
- startHadoop();
- ResultsCheckingDriver tldriver = new ResultsCheckingDriver();
- tldriver.run(HDFS_PATH1, HDFS_PATH2, RESULT_PATH, COUNT_REDUCER, SIZE_KMER, HADOOP_CONF_PATH);
- dumpResult();
- cleanupHadoop();
-
- }
- private void startHadoop() throws IOException {
- FileSystem lfs = FileSystem.getLocal(new Configuration());
- lfs.delete(new Path("build"), true);
- System.setProperty("hadoop.log.dir", "logs");
- dfsCluster = new MiniDFSCluster(conf, 2, true, null);
- dfs = dfsCluster.getFileSystem();
- mrCluster = new MiniMRCluster(4, dfs.getUri().toString(), 2);
-
- Path src = new Path(DATA_PATH1);
- Path dest = new Path(HDFS_PATH1 + "/");
- dfs.mkdirs(dest);
- dfs.copyFromLocalFile(src, dest);
- src = new Path(DATA_PATH2);
- dest = new Path(HDFS_PATH2 + "/");
- dfs.copyFromLocalFile(src, dest);
-
- DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
- conf.writeXml(confOutput);
- confOutput.flush();
- confOutput.close();
- }
-
- private void cleanupHadoop() throws IOException {
- mrCluster.shutdown();
- dfsCluster.shutdown();
- }
-
- private void dumpResult() throws IOException {
- Path src = new Path(RESULT_PATH);
- Path dest = new Path(ACTUAL_RESULT_DIR + "/");
- dfs.copyToLocalFile(src, dest);
- }
-}
-
diff --git a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/graphbuilding/GraphBuildingTest.java b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/graphbuilding/GraphBuildingTest.java
deleted file mode 100755
index 002dd0a..0000000
--- a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/graphbuilding/GraphBuildingTest.java
+++ /dev/null
@@ -1,145 +0,0 @@
-package edu.uci.ics.graphbuilding;
-
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.io.BufferedWriter;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
-
-import edu.uci.ics.genomix.type.GeneCode;
-import edu.uci.ics.genomix.type.KmerBytesWritable;
-import edu.uci.ics.genomix.type.KmerCountValue;
-import edu.uci.ics.utils.TestUtils;
-/**
- * This class test the correctness of graphbuilding program
- */
-@SuppressWarnings("deprecation")
-public class GraphBuildingTest {
-
- private static final String ACTUAL_RESULT_DIR = "actual1";
- private static final String COMPARE_DIR = "compare";
- private JobConf conf = new JobConf();
- private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
- private static final String DATA_PATH = "data/webmap/TreePath";
- private static final String HDFS_PATH = "/webmap";
- private static final String RESULT_PATH = "/result1";
- private static final String EXPECTED_PATH = "expected/result1";
- private static final String TEST_SOURCE_DIR = COMPARE_DIR + RESULT_PATH + "/comparesource.txt";
- private static final int COUNT_REDUCER = 4;
- private static final int SIZE_KMER = 5;
- private static final String GRAPHVIZ = "Graphviz/GenomixSource.txt";
-
- private MiniDFSCluster dfsCluster;
- private MiniMRCluster mrCluster;
- private FileSystem dfs;
-
- @SuppressWarnings("resource")
- @Test
- public void test() throws Exception {
- FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
- FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
- startHadoop();
-
- // run graph transformation tests
- GenomixDriver tldriver = new GenomixDriver();
- tldriver.run(HDFS_PATH, RESULT_PATH, COUNT_REDUCER, SIZE_KMER, HADOOP_CONF_PATH);
-
- SequenceFile.Reader reader = null;
- Path path = new Path(RESULT_PATH + "/part-00000");
- reader = new SequenceFile.Reader(dfs, path, conf);
-// KmerBytesWritable key = (KmerBytesWritable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
- KmerBytesWritable key = new KmerBytesWritable(SIZE_KMER);
- KmerCountValue value = (KmerCountValue) ReflectionUtils.newInstance(reader.getValueClass(), conf);
- File filePathTo = new File(TEST_SOURCE_DIR);
- BufferedWriter bw = new BufferedWriter(new FileWriter(filePathTo));
- File GraphViz = new File(GRAPHVIZ);
- BufferedWriter bw2 = new BufferedWriter(new FileWriter(GraphViz));
-
- while (reader.next(key, value)) {
- byte succeed = (byte) 0x0F;
- byte adjBitMap = value.getAdjBitMap();
- succeed = (byte) (succeed & adjBitMap);
- byte shiftedCode = 0;
- for(int i = 0 ; i < 4; i ++){
- byte temp = 0x01;
- temp = (byte)(temp << i);
- temp = (byte) (succeed & temp);
- if(temp != 0 ){
- bw2.write(key.toString());
- bw2.newLine();
- byte succeedCode = GeneCode.getGeneCodeFromBitMap(temp);
- shiftedCode = key.shiftKmerWithNextCode(succeedCode);
- bw2.write(key.toString());
- bw2.newLine();
- key.shiftKmerWithPreCode(shiftedCode);
- }
- }
- bw.write(key.toString() + "\t" + value.toString());
- bw.newLine();
- }
- bw2.close();
- bw.close();
-
- dumpResult();
-// TestUtils.compareWithResult(new File(TEST_SOURCE_DIR), new File(EXPECTED_PATH));
-
- cleanupHadoop();
-
- }
-
- private void startHadoop() throws IOException {
- FileSystem lfs = FileSystem.getLocal(new Configuration());
- lfs.delete(new Path("build"), true);
- System.setProperty("hadoop.log.dir", "logs");
- dfsCluster = new MiniDFSCluster(conf, 2, true, null);
- dfs = dfsCluster.getFileSystem();
- mrCluster = new MiniMRCluster(4, dfs.getUri().toString(), 2);
-
- Path src = new Path(DATA_PATH);
- Path dest = new Path(HDFS_PATH + "/");
- dfs.mkdirs(dest);
- dfs.copyFromLocalFile(src, dest);
-
- DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
- conf.writeXml(confOutput);
- confOutput.flush();
- confOutput.close();
- }
-
- private void cleanupHadoop() throws IOException {
- mrCluster.shutdown();
- dfsCluster.shutdown();
- }
-
- private void dumpResult() throws IOException {
- Path src = new Path(RESULT_PATH);
- Path dest = new Path(ACTUAL_RESULT_DIR);
- dfs.copyToLocalFile(src, dest);
- }
-}
diff --git a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/graphcountfilter/CountFilterTest.java b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/graphcountfilter/CountFilterTest.java
deleted file mode 100644
index f9b0049..0000000
--- a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/graphcountfilter/CountFilterTest.java
+++ /dev/null
@@ -1,101 +0,0 @@
-package edu.uci.ics.graphcountfilter;
-
-import java.io.BufferedWriter;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.io.ByteWritable;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
-import edu.uci.ics.genomix.type.KmerBytesWritable;
-import edu.uci.ics.utils.TestUtils;
-
-
-@SuppressWarnings("deprecation")
-public class CountFilterTest {
- private static final String ACTUAL_RESULT_DIR = "actual2";
- private static final String COMPARE_DIR = "compare";
- private JobConf conf = new JobConf();
- private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
- private static final String DATA_PATH = "actual1" + "/result1" + "/part-00000";
- private static final String HDFS_PATH = "/webmap";
- private static final String RESULT_PATH = "/result2";
- private static final String EXPECTED_PATH = "expected/result2";
- private static final String TEST_SOURCE_DIR = COMPARE_DIR + RESULT_PATH + "/comparesource.txt";
- private static final int COUNT_REDUCER = 4;
- private static final int SIZE_KMER = 5;
- private MiniDFSCluster dfsCluster;
- private MiniMRCluster mrCluster;
- private FileSystem dfs;
-
- @SuppressWarnings("resource")
- @Test
- public void test() throws Exception {
- FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
- FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
- startHadoop();
-
- // run graph transformation tests
- CountFilterDriver tldriver = new CountFilterDriver();
- tldriver.run(HDFS_PATH, RESULT_PATH, COUNT_REDUCER, 1, HADOOP_CONF_PATH);
-
- SequenceFile.Reader reader = null;
- Path path = new Path(RESULT_PATH + "/part-00000");
- reader = new SequenceFile.Reader(dfs, path, conf);
- KmerBytesWritable key = (KmerBytesWritable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
- ByteWritable value = (ByteWritable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
- File filePathTo = new File(TEST_SOURCE_DIR);
- BufferedWriter bw = new BufferedWriter(new FileWriter(filePathTo));
- while (reader.next(key, value)) {
- bw.write(key.toString() + "\t" + value.toString());
- bw.newLine();
- }
- bw.close();
-
- dumpResult();
- TestUtils.compareWithResult(new File(TEST_SOURCE_DIR), new File(EXPECTED_PATH));
-
- cleanupHadoop();
-
- }
- private void startHadoop() throws IOException {
- FileSystem lfs = FileSystem.getLocal(new Configuration());
- lfs.delete(new Path("build"), true);
- System.setProperty("hadoop.log.dir", "logs");
- dfsCluster = new MiniDFSCluster(conf, 2, true, null);
- dfs = dfsCluster.getFileSystem();
- mrCluster = new MiniMRCluster(4, dfs.getUri().toString(), 2);
-
- Path src = new Path(DATA_PATH);
- Path dest = new Path(HDFS_PATH + "/");
- dfs.mkdirs(dest);
- dfs.copyFromLocalFile(src, dest);
-
- DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
- conf.writeXml(confOutput);
- confOutput.flush();
- confOutput.close();
- }
-
- private void cleanupHadoop() throws IOException {
- mrCluster.shutdown();
- dfsCluster.shutdown();
- }
-
- private void dumpResult() throws IOException {
- Path src = new Path(RESULT_PATH);
- Path dest = new Path(ACTUAL_RESULT_DIR + "/");
- dfs.copyToLocalFile(src, dest);
- }
-}
diff --git a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmerging/MergePathTest.java b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmerging/MergePathTest.java
deleted file mode 100644
index 5e6e51e..0000000
--- a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmerging/MergePathTest.java
+++ /dev/null
@@ -1,106 +0,0 @@
-package edu.uci.ics.pathmerging;
-
-import java.io.BufferedWriter;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
-
-import edu.uci.ics.genomix.type.KmerBytesWritable;
-import edu.uci.ics.genomix.type.VKmerBytesWritable;
-import edu.uci.ics.utils.TestUtils;
-
-@SuppressWarnings("deprecation")
-public class MergePathTest {
- private static final String ACTUAL_RESULT_DIR = "actual3";
- private static final String COMPARE_DIR = "compare";
- private JobConf conf = new JobConf();
- private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
- private static final String DATA_PATH = "actual2" + "/result2" + "/part-00000";
- private static final String HDFS_PATH = "/webmap";
- private static final String HDFA_PATH_DATA = "/webmapdata";
-
- private static final String RESULT_PATH = "/result3";
- private static final String EXPECTED_PATH = "expected/result3";
- private static final String TEST_SOURCE_DIR = COMPARE_DIR + RESULT_PATH + "/comparesource.txt";
- private static final int COUNT_REDUCER = 1;
- private static final int SIZE_KMER = 3;
-
- private MiniDFSCluster dfsCluster;
- private MiniMRCluster mrCluster;
- private FileSystem dfs;
-
- @SuppressWarnings("resource")
- @Test
- public void test() throws Exception {
- FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
- FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
- startHadoop();
-
- MergePathDriver tldriver = new MergePathDriver();
- tldriver.run(HDFS_PATH, RESULT_PATH, HDFA_PATH_DATA, COUNT_REDUCER, SIZE_KMER, 1, HADOOP_CONF_PATH);
-
-/* SequenceFile.Reader reader = null;
- Path path = new Path(RESULT_PATH + "/part-00000");
-// Path path = new Path(RESULT_PATH + "/uncomplete0" + "/uncomplete0-r-00000");
- reader = new SequenceFile.Reader(dfs, path, conf);
- VKmerBytesWritable key = (VKmerBytesWritable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
- MergePathValueWritable value = (MergePathValueWritable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
- File filePathTo = new File(TEST_SOURCE_DIR);
- BufferedWriter bw = new BufferedWriter(new FileWriter(filePathTo));
- while (reader.next(key, value)) {
- bw.write(key.toString() + "\t" + value.getAdjBitMap() + "\t" + value.getFlag());
- bw.newLine();
- }
- bw.close();*/
- dumpResult();
-
-// TestUtils.compareWithResult(new File(TEST_SOURCE_DIR), new File(EXPECTED_PATH));
-
- cleanupHadoop();
-
- }
- private void startHadoop() throws IOException {
- FileSystem lfs = FileSystem.getLocal(new Configuration());
- lfs.delete(new Path("build"), true);
- System.setProperty("hadoop.log.dir", "logs");
- dfsCluster = new MiniDFSCluster(conf, 2, true, null);
- dfs = dfsCluster.getFileSystem();
- mrCluster = new MiniMRCluster(4, dfs.getUri().toString(), 2);
-
- Path src = new Path(DATA_PATH);
- Path dest = new Path(HDFS_PATH + "/");
- dfs.mkdirs(dest);
- dfs.copyFromLocalFile(src, dest);
- Path data = new Path(HDFA_PATH_DATA + "/");
- dfs.mkdirs(data);
-
- DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
- conf.writeXml(confOutput);
- confOutput.flush();
- confOutput.close();
- }
-
- private void cleanupHadoop() throws IOException {
- mrCluster.shutdown();
- dfsCluster.shutdown();
- }
-
- private void dumpResult() throws IOException {
-// Path src = new Path(HDFA_PATH_DATA + "/" + "complete2");
- Path src = new Path(RESULT_PATH);
- Path dest = new Path(ACTUAL_RESULT_DIR + "/");
- dfs.copyToLocalFile(src, dest);
- }
-}
diff --git a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmergingh2/MergePathH2Test.java b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmergingh2/MergePathH2Test.java
deleted file mode 100644
index ff15299..0000000
--- a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/pathmergingh2/MergePathH2Test.java
+++ /dev/null
@@ -1,105 +0,0 @@
-package edu.uci.ics.pathmergingh2;
-
-import java.io.BufferedWriter;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.FileWriter;
-import java.io.IOException;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.Test;
-import edu.uci.ics.genomix.type.KmerBytesWritable;
-import edu.uci.ics.genomix.type.VKmerBytesWritable;
-import edu.uci.ics.utils.TestUtils;
-
-@SuppressWarnings("deprecation")
-public class MergePathH2Test {
- private static final String ACTUAL_RESULT_DIR = "actual4";
- private static final String COMPARE_DIR = "compare";
- private JobConf conf = new JobConf();
- private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
- private static final String DATA_PATH = "actual2" + "/result2" + "/part-00000";
- private static final String HDFS_PATH = "/webmap";
- private static final String HDFA_PATH_DATA = "/webmapdata";
-
- private static final String RESULT_PATH = "/result4";
- private static final String EXPECTED_PATH = "expected/result4";
- private static final String TEST_SOURCE_DIR = COMPARE_DIR + RESULT_PATH + "/comparesource.txt";
- private static final int COUNT_REDUCER = 1;
- private static final int SIZE_KMER = 3;
-
- private MiniDFSCluster dfsCluster;
- private MiniMRCluster mrCluster;
- private FileSystem dfs;
-
- @SuppressWarnings("resource")
- @Test
- public void test() throws Exception {
- FileUtils.forceMkdir(new File(ACTUAL_RESULT_DIR));
- FileUtils.cleanDirectory(new File(ACTUAL_RESULT_DIR));
- startHadoop();
-
- MergePathH2Driver tldriver = new MergePathH2Driver();
- tldriver.run(HDFS_PATH, RESULT_PATH, HDFA_PATH_DATA, COUNT_REDUCER, SIZE_KMER, 1, HADOOP_CONF_PATH);
-
-/* SequenceFile.Reader reader = null;
-// Path path = new Path(RESULT_PATH + "/part-00000");
- Path path = new Path(RESULT_PATH + "/uncomplete0" + "/uncomplete0-r-00000");
- reader = new SequenceFile.Reader(dfs, path, conf);
- VKmerBytesWritable key = (VKmerBytesWritable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
- MergePathValueWritable value = (MergePathValueWritable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
- File filePathTo = new File(TEST_SOURCE_DIR);
- BufferedWriter bw = new BufferedWriter(new FileWriter(filePathTo));
- while (reader.next(key, value)) {
- bw.write(key.toString() + "\t" + value.getAdjBitMap() + "\t" + value.getFlag());
- bw.newLine();
- }
- bw.close();*/
-// dumpResult();
-
-// TestUtils.compareWithResult(new File(TEST_SOURCE_DIR), new File(EXPECTED_PATH));
-
- cleanupHadoop();
-
- }
- private void startHadoop() throws IOException {
- FileSystem lfs = FileSystem.getLocal(new Configuration());
- lfs.delete(new Path("build"), true);
- System.setProperty("hadoop.log.dir", "logs");
- dfsCluster = new MiniDFSCluster(conf, 2, true, null);
- dfs = dfsCluster.getFileSystem();
- mrCluster = new MiniMRCluster(4, dfs.getUri().toString(), 2);
-
- Path src = new Path(DATA_PATH);
- Path dest = new Path(HDFS_PATH + "/");
- dfs.mkdirs(dest);
- dfs.copyFromLocalFile(src, dest);
- Path data = new Path(HDFA_PATH_DATA + "/");
- dfs.mkdirs(data);
-
- DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
- conf.writeXml(confOutput);
- confOutput.flush();
- confOutput.close();
- }
-
- private void cleanupHadoop() throws IOException {
- mrCluster.shutdown();
- dfsCluster.shutdown();
- }
-
- private void dumpResult() throws IOException {
-// Path src = new Path(HDFA_PATH_DATA + "/" + "complete2");
- Path src = new Path(RESULT_PATH);
- Path dest = new Path(ACTUAL_RESULT_DIR + "/");
- dfs.copyToLocalFile(src, dest);
- }
-}
\ No newline at end of file
diff --git a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/utils/TestUtils.java b/genomix/genomix-hadoop/src/test/java/edu/uci/ics/utils/TestUtils.java
deleted file mode 100755
index 015017a..0000000
--- a/genomix/genomix-hadoop/src/test/java/edu/uci/ics/utils/TestUtils.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package edu.uci.ics.utils;
-
-/*
- * Copyright 2009-2012 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-
-/**
- * This class offer the service for graphbuildingtest.class
- */
-public class TestUtils {
- public static void compareWithResult(File expectedFile, File actualFile) throws Exception {
- BufferedReader readerExpected = new BufferedReader(new FileReader(expectedFile));
- BufferedReader readerActual = new BufferedReader(new FileReader(actualFile));
- String lineExpected, lineActual;
- int num = 1;
- try {
- while ((lineExpected = readerExpected.readLine()) != null) {
- lineActual = readerActual.readLine();
- // Assert.assertEquals(lineExpected, lineActual);
- if (lineActual == null) {
- throw new Exception("Actual result changed at line " + num + ":\n< " + lineExpected + "\n> ");
- }
- if (!equalStrings(lineExpected, lineActual)) {
- throw new Exception("Result for changed at line " + num + ":\n< " + lineExpected + "\n> "
- + lineActual);
- }
- ++num;
- }
- lineActual = readerActual.readLine();
- if (lineActual != null) {
- throw new Exception("Actual result changed at line " + num + ":\n< \n> " + lineActual);
- }
- } finally {
- readerExpected.close();
- readerActual.close();
- }
- }
-
- private static boolean equalStrings(String s1, String s2) {
- String[] rowsOne = s1.split("\t");
- String[] rowsTwo = s2.split("\t");
-
- if (rowsOne.length != rowsTwo.length)
- return false;
-
- for (int i = 0; i < rowsOne.length; i++) {
- String row1 = rowsOne[i];
- String row2 = rowsTwo[i];
-
- if (row1.equals(row2))
- continue;
- else
- return false;
- }
- return true;
- }
-
- public static void main(String[] args) throws Exception {
- TestUtils TUtils = new TestUtils();
- }
-}