ASTERIXDB-1126 Correct comparators and type traits for external index
For external data, the type definition does not contain information
about primary keys, so we must derive them from the data input format.
Change-Id: I71d924d7e2b7a7e6c752bc97679e612946afc17c
Reviewed-on: https://asterix-gerrit.ics.uci.edu/504
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Ian Maxon <imaxon@apache.org>
diff --git a/asterix-app/data/csv/sample_01.csv b/asterix-app/data/csv/sample_01.csv
index 4dd437a..fbba382 100644
--- a/asterix-app/data/csv/sample_01.csv
+++ b/asterix-app/data/csv/sample_01.csv
@@ -1,8 +1,8 @@
-1,0.899682764,5.6256,2013-08-07,07:22:35,1979-02-25T23:48:27.034
-2,0.669052398,,-1923-03-29,19:33:34,-1979-02-25T23:48:27.002
-3,0.572733058,192674,-1923-03-28,19:33:34,-1979-02-25T23:48:27.001
-4,,192674,-1923-03-27,19:33:34,-1979-02-25T23:48:27.001
-5,0.572733058,192674,,19:33:34,-1979-02-25T23:48:27.001
-6,0.572733058,192674,-1923-03-25,,-1979-02-25T23:48:27.001
-7,0.572733058,192674,-1923-03-24,19:33:34,
+1,0.899682764,5.6256,2013-08-07,07:22:35,1979-02-25T23:48:27.034
+2,0.669052398,,-1923-03-29,19:33:34,-1979-02-25T23:48:27.002
+3,0.572733058,192674,-1923-03-28,19:33:34,-1979-02-25T23:48:27.001
+4,,192674,-1923-03-27,19:33:34,-1979-02-25T23:48:27.001
+5,0.572733058,192674,,19:33:34,-1979-02-25T23:48:27.001
+6,0.572733058,192674,-1923-03-25,,-1979-02-25T23:48:27.001
+7,0.572733058,192674,-1923-03-24,19:33:34,
8,,,,,
\ No newline at end of file
diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index c9a1b8e..85c9336 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -94,6 +94,18 @@
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>jar</goal>
+ <goal>test-jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
</plugins>
</build>
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java b/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
index 6d2de65..66680c4 100644
--- a/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
+++ b/asterix-app/src/test/java/org/apache/asterix/test/runtime/HDFSCluster.java
@@ -36,7 +36,6 @@
*
* @author ramangrover29
*/
-@SuppressWarnings("deprecation")
public class HDFSCluster {
private static final String PATH_TO_HADOOP_CONF = "src/test/resources/hadoop/conf";
@@ -64,25 +63,27 @@
* Called prior to running the Runtime test suite.
*/
public void setup() throws Exception {
- conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/core-site.xml"));
- conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
- conf.addResource(new Path(PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
+ setup("");
+ }
+
+ public void setup(String basePath) throws Exception {
+ conf.addResource(new Path(basePath + PATH_TO_HADOOP_CONF + "/core-site.xml"));
+ conf.addResource(new Path(basePath + PATH_TO_HADOOP_CONF + "/mapred-site.xml"));
+ conf.addResource(new Path(basePath + PATH_TO_HADOOP_CONF + "/hdfs-site.xml"));
cleanupLocal();
- //this constructor is deprecated in hadoop 2x
- //dfsCluster = new MiniDFSCluster(nameNodePort, conf, numDataNodes, true, true, StartupOption.REGULAR, null);
MiniDFSCluster.Builder build = new MiniDFSCluster.Builder(conf);
build.nameNodePort(nameNodePort);
build.numDataNodes(numDataNodes);
build.startupOption(StartupOption.REGULAR);
dfsCluster = build.build();
dfs = FileSystem.get(conf);
- loadData();
+ loadData(basePath);
}
- private void loadData() throws IOException {
+ private void loadData(String localDataRoot) throws IOException {
Path destDir = new Path(HDFS_PATH);
dfs.mkdirs(destDir);
- File srcDir = new File(DATA_PATH);
+ File srcDir = new File(localDataRoot + DATA_PATH);
File[] listOfFiles = srcDir.listFiles();
for (File srcFile : listOfFiles) {
Path path = new Path(srcFile.getAbsolutePath());
@@ -108,12 +109,10 @@
HDFSCluster cluster = new HDFSCluster();
cluster.setup();
JobConf conf = configureJobConf();
- FileSystem fs = FileSystem.get(conf);
InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, 0);
for (InputSplit split : inputSplits) {
System.out.println("split :" + split);
}
- // cluster.cleanup();
}
private static JobConf configureJobConf() throws Exception {