Merge branch 'master' into madhusudancs/error-reporting.

Among other changes, this merge picks up the 0.2.5-SNAPSHOT to 0.2.6-SNAPSHOT version bumps, the removal of the DIE logical and physical operators, the AggregateOperator switch from a partitioning variable to a boolean global flag, and the DistinctOperator schema-propagation fixes.
diff --git a/.gitignore b/.gitignore
index 328b7d3..31c9fc1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,3 +3,14 @@
.settings
.project
ClusterControllerService
+actual
+build
+edu.uci.ics.hyracks.control.nc.NodeControllerService
+exception
+expected
+teststore1
+teststore2
+derby.log
+hadoop-conf-tmp
+metastore_db
+teststore
diff --git a/algebricks/algebricks-common/pom.xml b/algebricks/algebricks-common/pom.xml
index 8311b8a..0e840fa 100644
--- a/algebricks/algebricks-common/pom.xml
+++ b/algebricks/algebricks-common/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,7 +27,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/algebricks/algebricks-compiler/pom.xml b/algebricks/algebricks-compiler/pom.xml
index 74a8ef7..2f78812 100644
--- a/algebricks/algebricks-compiler/pom.xml
+++ b/algebricks/algebricks-compiler/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,12 +27,12 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-rewriter</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-core</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/algebricks/algebricks-core/pom.xml b/algebricks/algebricks-core/pom.xml
index dc2ddd0..11b03ee 100644
--- a/algebricks/algebricks-core/pom.xml
+++ b/algebricks/algebricks-core/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,27 +27,27 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-rtree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-runtime</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java
index b8bdf3e..ba51eff 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/LogicalOperatorTag.java
@@ -27,7 +27,6 @@
INNERJOIN,
LEFTOUTERJOIN,
LIMIT,
- DIE,
NESTEDTUPLESOURCE,
ORDER,
PROJECT,
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
index 0efb5ff..c3a59ea 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/base/PhysicalOperatorTag.java
@@ -33,7 +33,6 @@
SPLIT,
STABLE_SORT,
STREAM_LIMIT,
- STREAM_DIE,
STREAM_SELECT,
STREAM_PROJECT,
STRING_STREAM_SCRIPT,
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java
index 2f53f9b..0c105d0 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java
@@ -20,10 +20,11 @@
// private ArrayList<AggregateFunctionCallExpression> expressions;
// TODO type safe list of expressions
private List<Mutable<ILogicalExpression>> mergeExpressions;
- private LogicalVariable partitioningVariable;
+ private boolean global;
public AggregateOperator(List<LogicalVariable> variables, List<Mutable<ILogicalExpression>> expressions) {
super(variables, expressions);
+ global = true;
}
@Override
@@ -69,12 +70,12 @@
return mergeExpressions;
}
- public void setPartitioningVariable(LogicalVariable partitioningVariable) {
- this.partitioningVariable = partitioningVariable;
+ public void setGlobal(boolean global) {
+ this.global = global;
}
- public LogicalVariable getPartitioningVariable() {
- return partitioningVariable;
+ public boolean isGlobal() {
+ return global;
}
@Override
@@ -90,4 +91,5 @@
}
return env;
}
+
}
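
A note on the AggregateOperator change above: the per-operator partitioning variable is gone, replaced by a boolean "global" flag that defaults to true; the physical layer (AggregatePOperator) and OperatorManipulationUtil further down use it to pin global aggregates to unpartitioned execution. Below is a minimal, self-contained Java sketch of the flag's intended role; the names are illustrative stand-ins, not the Hyracks API.

    // Hedged sketch: a global aggregate runs unpartitioned, a local
    // (per-partition) one inherits its child's mode. "Mode" stands in
    // for AbstractLogicalOperator.ExecutionMode.
    public class AggregateFlagSketch {
        enum Mode { UNPARTITIONED, PARTITIONED }

        private boolean global = true; // mirrors the new constructor default

        void setGlobal(boolean global) { this.global = global; }

        boolean isGlobal() { return global; }

        Mode decideMode(Mode childMode) {
            return global ? Mode.UNPARTITIONED : childMode;
        }

        public static void main(String[] args) {
            AggregateFlagSketch agg = new AggregateFlagSketch();
            System.out.println(agg.decideMode(Mode.PARTITIONED)); // UNPARTITIONED
            agg.setGlobal(false);
            System.out.println(agg.decideMode(Mode.PARTITIONED)); // PARTITIONED
        }
    }
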
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DieOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DieOperator.java
deleted file mode 100644
index 03bfcba..0000000
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DieOperator.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical;
-
-import java.util.ArrayList;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.VariablePropagationPolicy;
-import edu.uci.ics.hyracks.algebricks.core.algebra.typing.ITypingContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
-import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
-
-public class DieOperator extends AbstractLogicalOperator {
-
- private final Mutable<ILogicalExpression> afterObjects; // mandatory
-
- public DieOperator(ILogicalExpression maxObjectsExpr) {
- this.afterObjects = new MutableObject<ILogicalExpression>(maxObjectsExpr);
- }
-
- public Mutable<ILogicalExpression> getAfterObjects() {
- return afterObjects;
- }
-
- @Override
- public void recomputeSchema() {
- schema = new ArrayList<LogicalVariable>();
- schema.addAll(inputs.get(0).getValue().getSchema());
- }
-
- @Override
- public <R, T> R accept(ILogicalOperatorVisitor<R, T> visitor, T arg) throws AlgebricksException {
- return visitor.visitDieOperator(this, arg);
- }
-
- @Override
- public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform visitor) throws AlgebricksException {
- boolean b = false;
- if (visitor.transform(afterObjects)) {
- b = true;
- }
- return b;
- }
-
- @Override
- public LogicalOperatorTag getOperatorTag() {
- return LogicalOperatorTag.DIE;
- }
-
- @Override
- public VariablePropagationPolicy getVariablePropagationPolicy() {
- return VariablePropagationPolicy.ALL;
- }
-
- @Override
- public boolean isMap() {
- return true;
- }
-
- @Override
- public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
- return createPropagatingAllInputsTypeEnvironment(ctx);
- }
-
- @Override
- public boolean requiresVariableReferenceExpressions() {
- return false;
- }
-}
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistinctOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistinctOperator.java
index b1da831..fed1a15 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistinctOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/DistinctOperator.java
@@ -28,7 +28,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.VariablePropagationPolicy;
import edu.uci.ics.hyracks.algebricks.core.algebra.typing.ITypingContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.typing.NonPropagatingTypeEnvironment;
import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionReferenceTransform;
import edu.uci.ics.hyracks.algebricks.core.algebra.visitors.ILogicalOperatorVisitor;
@@ -50,15 +49,12 @@
@Override
public void recomputeSchema() {
- if (schema == null) {
- schema = new ArrayList<LogicalVariable>();
- }
- schema.clear();
- for (Mutable<ILogicalExpression> eRef : expressions) {
- ILogicalExpression e = eRef.getValue();
- if (e.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
- VariableReferenceExpression v = (VariableReferenceExpression) e;
- schema.add(v.getVariableReference());
+ schema = new ArrayList<LogicalVariable>();
+ schema.addAll(this.getDistinctByVarList());
+ List<LogicalVariable> inputSchema = inputs.get(0).getValue().getSchema();
+ for (LogicalVariable var : inputSchema) {
+ if (!schema.contains(var)) {
+ schema.add(var);
}
}
}
@@ -69,12 +65,16 @@
@Override
public void propagateVariables(IOperatorSchema target, IOperatorSchema... sources)
throws AlgebricksException {
- for (Mutable<ILogicalExpression> eRef : expressions) {
- ILogicalExpression e = eRef.getValue();
- if (e.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
- VariableReferenceExpression v = (VariableReferenceExpression) e;
- target.addVariable(v.getVariableReference());
- }
+            /** make sure the distinct key vars are laid out first */
+ for (LogicalVariable keyVar : getDistinctByVarList()) {
+ target.addVariable(keyVar);
+ }
+ /** add other source vars */
+ for (IOperatorSchema srcSchema : sources) {
+ for (LogicalVariable srcVar : srcSchema)
+ if (target.findVariable(srcVar) < 0) {
+ target.addVariable(srcVar);
+ }
}
}
};
@@ -128,16 +128,7 @@
@Override
public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
- IVariableTypeEnvironment env = new NonPropagatingTypeEnvironment(ctx.getExpressionTypeComputer(), ctx.getMetadataProvider());
- IVariableTypeEnvironment childEnv = ctx.getOutputTypeEnvironment(inputs.get(0).getValue());
- for (Mutable<ILogicalExpression> exprRef : expressions) {
- ILogicalExpression expr = exprRef.getValue();
- if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
- VariableReferenceExpression varRefExpr = (VariableReferenceExpression) expr;
- env.setVarType(varRefExpr.getVariableReference(), childEnv.getType(expr));
- }
- }
- return env;
+ return createPropagatingAllInputsTypeEnvironment(ctx);
}
}
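
The rewritten DistinctOperator above now propagates every live input variable instead of only the distinct-by expressions, with the distinct keys laid out first. A self-contained sketch of that layout rule, using strings as stand-ins for LogicalVariable (the variable names are made up for illustration):

    import java.util.ArrayList;
    import java.util.LinkedHashSet;
    import java.util.List;

    public class DistinctSchemaSketch {
        // Distinct-by keys first, then the remaining input variables,
        // with duplicates dropped (same effect as the contains() checks above).
        static List<String> layout(List<String> distinctKeys, List<String> inputSchema) {
            LinkedHashSet<String> schema = new LinkedHashSet<>(distinctKeys);
            schema.addAll(inputSchema);
            return new ArrayList<>(schema);
        }

        public static void main(String[] args) {
            System.out.println(layout(List.of("$k"), List.of("$a", "$k", "$b")));
            // prints [$k, $a, $b]
        }
    }
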
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
index 1b4be1e..527a11c 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/FDsAndEquivClassesVisitor.java
@@ -45,7 +45,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -357,12 +356,6 @@
}
@Override
- public Void visitDieOperator(DieOperator op, IOptimizationContext ctx) throws AlgebricksException {
- propagateFDsAndEquivClasses(op, ctx);
- return null;
- }
-
- @Override
public Void visitNestedTupleSourceOperator(NestedTupleSourceOperator op, IOptimizationContext ctx)
throws AlgebricksException {
AbstractLogicalOperator op1 = (AbstractLogicalOperator) op.getDataSourceReference().getValue();
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
index ac6d887..745b8c6 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
@@ -36,7 +36,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -180,16 +179,6 @@
}
@Override
- public Boolean visitDieOperator(DieOperator op, ILogicalOperator arg) throws AlgebricksException {
- AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
- if (aop.getOperatorTag() != LogicalOperatorTag.DIE)
- return Boolean.FALSE;
- DieOperator dieOpArg = (DieOperator) copyAndSubstituteVar(op, arg);
- boolean isomorphic = op.getAfterObjects().getValue().equals(dieOpArg.getAfterObjects().getValue());
- return isomorphic;
- }
-
- @Override
public Boolean visitInnerJoinOperator(InnerJoinOperator op, ILogicalOperator arg) throws AlgebricksException {
AbstractLogicalOperator aop = (AbstractLogicalOperator) arg;
if (aop.getOperatorTag() != LogicalOperatorTag.INNERJOIN)
@@ -643,11 +632,6 @@
}
@Override
- public ILogicalOperator visitDieOperator(DieOperator op, Void arg) throws AlgebricksException {
- return new DieOperator(deepCopyExpressionRef(op.getAfterObjects()).getValue());
- }
-
- @Override
public ILogicalOperator visitInnerJoinOperator(InnerJoinOperator op, Void arg) throws AlgebricksException {
return new InnerJoinOperator(deepCopyExpressionRef(op.getCondition()), op.getInputs().get(0), op
.getInputs().get(1));
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
index b9544c7..ac8cabe 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismVariableMappingVisitor.java
@@ -35,7 +35,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -109,12 +108,6 @@
}
@Override
- public Void visitDieOperator(DieOperator op, ILogicalOperator arg) throws AlgebricksException {
- mapVariablesStandard(op, arg);
- return null;
- }
-
- @Override
public Void visitInnerJoinOperator(InnerJoinOperator op, ILogicalOperator arg) throws AlgebricksException {
mapVariablesStandard(op, arg);
return null;
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
index 8f1d686..e4fd78e 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalPropertiesVisitor.java
@@ -27,7 +27,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -142,12 +141,6 @@
}
@Override
- public Void visitDieOperator(DieOperator op, IOptimizationContext arg) throws AlgebricksException {
- // TODO Auto-generated method stub
- return null;
- }
-
- @Override
public Void visitNestedTupleSourceOperator(NestedTupleSourceOperator op, IOptimizationContext arg)
throws AlgebricksException {
// TODO Auto-generated method stub
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java
index 31adcba..91ffe4b 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/ProducedVariableVisitor.java
@@ -30,7 +30,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -136,11 +135,6 @@
}
@Override
- public Void visitDieOperator(DieOperator op, Void arg) throws AlgebricksException {
- return null;
- }
-
- @Override
public Void visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Void arg) throws AlgebricksException {
return null;
}
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java
index cd0cee3..5225ac7 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SchemaVariableVisitor.java
@@ -14,7 +14,9 @@
*/
package edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.visitors;
+import java.util.ArrayList;
import java.util.Collection;
+import java.util.List;
import org.apache.commons.lang3.mutable.Mutable;
@@ -29,7 +31,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -86,11 +87,17 @@
@Override
public Void visitDistinctOperator(DistinctOperator op, Void arg) throws AlgebricksException {
- for (Mutable<ILogicalExpression> exprRef : op.getExpressions()) {
- ILogicalExpression expr = exprRef.getValue();
- if (expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
- VariableReferenceExpression varRefExpr = (VariableReferenceExpression) expr;
- schemaVariables.add(varRefExpr.getVariableReference());
+ List<LogicalVariable> allLiveVars = new ArrayList<LogicalVariable>();
+ for (Mutable<ILogicalOperator> c : op.getInputs()) {
+ VariableUtilities.getLiveVariables(c.getValue(), allLiveVars);
+ }
+ VariableUtilities.getProducedVariables(op, allLiveVars);
+ /** put distinct vars first */
+ schemaVariables.addAll(op.getDistinctByVarList());
+ /** then other live vars */
+ for (LogicalVariable var : allLiveVars) {
+ if (!schemaVariables.contains(var)) {
+ schemaVariables.add(var);
}
}
return null;
@@ -152,12 +159,6 @@
}
@Override
- public Void visitDieOperator(DieOperator op, Void arg) throws AlgebricksException {
- standardLayout(op);
- return null;
- }
-
- @Override
public Void visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Void arg) throws AlgebricksException {
VariableUtilities.getLiveVariables(op.getSourceOperator(), schemaVariables);
return null;
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java
index 69fb3f8..bb9b600 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/SubstituteVariableVisitor.java
@@ -31,7 +31,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -185,14 +184,6 @@
}
@Override
- public Void visitDieOperator(DieOperator op, Pair<LogicalVariable, LogicalVariable> pair)
- throws AlgebricksException {
- op.getAfterObjects().getValue().substituteVar(pair.first, pair.second);
- substVarTypes(op, pair);
- return null;
- }
-
- @Override
public Void visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Pair<LogicalVariable, LogicalVariable> pair)
throws AlgebricksException {
return null;
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java
index 5361a19..42f7e20 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/logical/visitors/UsedVariableVisitor.java
@@ -30,7 +30,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -78,9 +77,6 @@
for (Mutable<ILogicalExpression> exprRef : op.getExpressions()) {
exprRef.getValue().getUsedVariables(usedVariables);
}
- if (op.getPartitioningVariable() != null) {
- usedVariables.add(op.getPartitioningVariable());
- }
return null;
}
@@ -198,12 +194,6 @@
}
@Override
- public Void visitDieOperator(DieOperator op, Void arg) {
- op.getAfterObjects().getValue().getUsedVariables(usedVariables);
- return null;
- }
-
- @Override
public Void visitNestedTupleSourceOperator(NestedTupleSourceOperator op, Void arg) {
// does not use any variable
return null;
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java
index 81dc2c2..f47c2ec 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java
@@ -16,12 +16,10 @@
import java.util.ArrayList;
import java.util.List;
-import java.util.Set;
import org.apache.commons.lang3.mutable.Mutable;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.ListSet;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
@@ -31,15 +29,14 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;
+import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.UnorderedPartitionedProperty;
import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
import edu.uci.ics.hyracks.algebricks.runtime.base.IAggregateEvaluatorFactory;
@@ -58,25 +55,29 @@
@Override
public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
- AbstractLogicalOperator op2 = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
- IPhysicalPropertiesVector childProps = op2.getDeliveredPhysicalProperties();
- deliveredProperties = new StructuralPropertiesVector(childProps.getPartitioningProperty(),
- new ArrayList<ILocalStructuralProperty>(0));
+ AggregateOperator aggOp = (AggregateOperator) op;
+ ILogicalOperator op2 = op.getInputs().get(0).getValue();
+ if (aggOp.getExecutionMode() != AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) {
+ deliveredProperties = new StructuralPropertiesVector(op2.getDeliveredPhysicalProperties()
+ .getPartitioningProperty(), new ArrayList<ILocalStructuralProperty>());
+ } else {
+ deliveredProperties = new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED,
+ new ArrayList<ILocalStructuralProperty>());
+ }
}
@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
IPhysicalPropertiesVector reqdByParent) {
AggregateOperator aggOp = (AggregateOperator) op;
- if (aggOp.getExecutionMode() == ExecutionMode.PARTITIONED && aggOp.getPartitioningVariable() != null) {
- StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
- Set<LogicalVariable> partitioningVariables = new ListSet<LogicalVariable>();
- partitioningVariables.add(aggOp.getPartitioningVariable());
- pv[0] = new StructuralPropertiesVector(new UnorderedPartitionedProperty(partitioningVariables, null), null);
+ StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
+ if (aggOp.isGlobal() && aggOp.getExecutionMode() == AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) {
+ pv[0] = new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED, null);
return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
} else {
return emptyUnaryRequirements();
}
+
}
@Override
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java
index 302d4d2..9a96319 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java
@@ -100,10 +100,9 @@
context, columns);
Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = mp.getResultHandleRuntime(
- resultOp.getDataSink(), columns, pf, inputDesc, false, spec);
+ resultOp.getDataSink(), columns, pf, inputDesc, true, spec);
builder.contributeHyracksOperator(resultOp, runtimeAndConstraints.first);
- builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
ILogicalOperator src = resultOp.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, resultOp, 0);
}
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeletePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeletePOperator.java
index e000b85..5fd6ef4 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeletePOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeletePOperator.java
@@ -1,7 +1,6 @@
package edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical;
import java.util.ArrayList;
-import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.mutable.Mutable;
@@ -22,13 +21,8 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IndexInsertDeleteOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.InsertDeleteOperator.Kind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder.OrderKind;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.ILocalStructuralProperty;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.LocalOrderProperty;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.OrderColumn;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
@@ -45,13 +39,13 @@
private final IDataSourceIndex<?, ?> dataSourceIndex;
public IndexInsertDeletePOperator(List<LogicalVariable> primaryKeys, List<LogicalVariable> secondaryKeys,
- Mutable<ILogicalExpression> filterExpr, IDataSourceIndex<?, ?> dataSourceIndex) {
+ Mutable<ILogicalExpression> filterExpr, IDataSourceIndex<?, ?> dataSourceIndex) {
this.primaryKeys = primaryKeys;
this.secondaryKeys = secondaryKeys;
if (filterExpr != null) {
- this.filterExpr = filterExpr.getValue();
+ this.filterExpr = filterExpr.getValue();
} else {
- this.filterExpr = null;
+ this.filterExpr = null;
}
this.dataSourceIndex = dataSourceIndex;
}
@@ -64,7 +58,7 @@
@Override
public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
AbstractLogicalOperator op2 = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
- deliveredProperties = (StructuralPropertiesVector) op2.getDeliveredPhysicalProperties().clone();
+ deliveredProperties = (StructuralPropertiesVector) op2.getDeliveredPhysicalProperties().clone();
}
@Override
@@ -73,15 +67,11 @@
List<LogicalVariable> scanVariables = new ArrayList<LogicalVariable>();
scanVariables.addAll(primaryKeys);
scanVariables.add(new LogicalVariable(-1));
- IPartitioningProperty pp = dataSourceIndex.getDataSource().getPropertiesProvider()
- .computePropertiesVector(scanVariables).getPartitioningProperty();
- List<ILocalStructuralProperty> orderProps = new LinkedList<ILocalStructuralProperty>();
- for (LogicalVariable k : secondaryKeys) {
- orderProps.add(new LocalOrderProperty(new OrderColumn(k, OrderKind.ASC)));
- }
- StructuralPropertiesVector[] r = new StructuralPropertiesVector[] { new StructuralPropertiesVector(pp,
- orderProps) };
- return new PhysicalRequirements(r, IPartitioningRequirementsCoordinator.NO_COORDINATION);
+ IPhysicalPropertiesVector r = dataSourceIndex.getDataSource().getPropertiesProvider()
+ .computePropertiesVector(scanVariables);
+ IPhysicalPropertiesVector[] requirements = new IPhysicalPropertiesVector[1];
+ requirements[0] = r;
+ return new PhysicalRequirements(requirements, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
@SuppressWarnings({ "rawtypes", "unchecked" })
@@ -93,8 +83,8 @@
IMetadataProvider mp = context.getMetadataProvider();
JobSpecification spec = builder.getJobSpec();
- RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0],
- context);
+ RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
+ context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = null;
IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(insertDeleteOp);
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/PreSortedDistinctByPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/PreSortedDistinctByPOperator.java
index 29af97c..b5656cd 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/PreSortedDistinctByPOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/PreSortedDistinctByPOperator.java
@@ -105,14 +105,24 @@
fdColumns[j++] = inputSchemas[0].findVariable(v);
}
}
+ int[] keysAndDecs = new int[keys.length + fdColumns.length];
+ for (int i = 0; i < keys.length; i++) {
+ keysAndDecs[i] = keys[i];
+ }
+ for (int i = 0; i < fdColumns.length; i++) {
+ keysAndDecs[i + keys.length] = fdColumns[i];
+ }
+
IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(
columnList, context.getTypeEnvironment(op), context);
IAggregateEvaluatorFactory[] aggFactories = new IAggregateEvaluatorFactory[] {};
IAggregatorDescriptorFactory aggregatorFactory = new SimpleAlgebricksAccumulatingAggregatorFactory(
- aggFactories, keys, fdColumns);
+ aggFactories, keysAndDecs);
- RecordDescriptor recordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
- PreclusteredGroupOperatorDescriptor opDesc = new PreclusteredGroupOperatorDescriptor(spec, keys,
+ RecordDescriptor recordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema,
+ context);
+        /** make the fd columns part of the key, but have the comparator compare only the distinct key columns */
+ PreclusteredGroupOperatorDescriptor opDesc = new PreclusteredGroupOperatorDescriptor(spec, keysAndDecs,
comparatorFactories, aggregatorFactory, recordDescriptor);
contributeOpDesc(builder, (AbstractLogicalOperator) op, opDesc);
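
The hunk above folds the functionally determined (decor) columns into the group-by key array so they are carried through the preclustered group operator, while the comparator factories are still built only over the distinct key columns. The concatenation itself, as a standalone sketch; System.arraycopy is an equivalent idiom for the two copy loops:

    import java.util.Arrays;

    public class KeysAndDecsSketch {
        // Equivalent to the two copy loops building keysAndDecs above.
        static int[] concat(int[] keys, int[] fdColumns) {
            int[] keysAndDecs = Arrays.copyOf(keys, keys.length + fdColumns.length);
            System.arraycopy(fdColumns, 0, keysAndDecs, keys.length, fdColumns.length);
            return keysAndDecs;
        }

        public static void main(String[] args) {
            System.out.println(Arrays.toString(concat(new int[] { 0, 1 }, new int[] { 3 })));
            // prints [0, 1, 3]
        }
    }
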
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamDiePOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamDiePOperator.java
deleted file mode 100644
index 73b9b79..0000000
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamDiePOperator.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IHyracksJobBuilder;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningRequirementsCoordinator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPhysicalPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.PhysicalRequirements;
-import edu.uci.ics.hyracks.algebricks.core.algebra.properties.StructuralPropertiesVector;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenContext;
-import edu.uci.ics.hyracks.algebricks.core.jobgen.impl.JobGenHelper;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.std.StreamDieRuntimeFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-
-public class StreamDiePOperator extends AbstractPhysicalOperator {
-
- public StreamDiePOperator() {
- }
-
- @Override
- public PhysicalOperatorTag getOperatorTag() {
- return PhysicalOperatorTag.STREAM_DIE;
- }
-
- @Override
- public boolean isMicroOperator() {
- return true;
- }
-
- @Override
- public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
- ILogicalOperator op2 = op.getInputs().get(0).getValue();
- deliveredProperties = op2.getDeliveredPhysicalProperties().clone();
- }
-
- @Override
- public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
- IPhysicalPropertiesVector reqdByParent) {
- StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
- pv[0] = (StructuralPropertiesVector) reqdByParent;
- return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
- }
-
- @Override
- public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
- IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException {
- DieOperator die = (DieOperator) op;
- IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
- IVariableTypeEnvironment env = context.getTypeEnvironment(op);
- IScalarEvaluatorFactory afterObjectsFact = expressionRuntimeProvider.createEvaluatorFactory(die
- .getAfterObjects().getValue(), env, inputSchemas, context);
- RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
- StreamDieRuntimeFactory runtime = new StreamDieRuntimeFactory(afterObjectsFact, null,
- context.getBinaryIntegerInspectorFactory());
- builder.contributeMicroOperator(die, runtime, recDesc);
- ILogicalOperator src = die.getInputs().get(0).getValue();
- builder.contributeGraphEdge(src, 0, die, 0);
- }
-
-}
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java
index 11e24d7..531b300 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java
@@ -22,6 +22,7 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.base.PhysicalOperatorTag;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IExpressionRuntimeProvider;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.IVariableTypeEnvironment;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.IOperatorSchema;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.properties.IPartitioningProperty;
@@ -37,10 +38,8 @@
public class StreamLimitPOperator extends AbstractPhysicalOperator {
- private boolean global;
+ public StreamLimitPOperator() {
- public StreamLimitPOperator(boolean global) {
- this.global = global;
}
@Override
@@ -55,14 +54,22 @@
@Override
public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
+ AbstractLogicalOperator limitOp = (AbstractLogicalOperator) op;
ILogicalOperator op2 = op.getInputs().get(0).getValue();
- deliveredProperties = op2.getDeliveredPhysicalProperties().clone();
+ if (limitOp.getExecutionMode() == AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) {
+            // partitioning property: unpartitioned; local properties: whatever the child delivers
+ deliveredProperties = new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED, op2
+ .getDeliveredPhysicalProperties().getLocalProperties());
+ } else {
+ deliveredProperties = op2.getDeliveredPhysicalProperties().clone();
+ }
}
@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
IPhysicalPropertiesVector reqdByParent) {
- if (global) {
+ AbstractLogicalOperator limitOp = (AbstractLogicalOperator) op;
+ if (limitOp.getExecutionMode() == AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) {
StructuralPropertiesVector[] pv = new StructuralPropertiesVector[1];
pv[0] = new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED, null);
return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
@@ -83,7 +90,8 @@
ILogicalExpression offsetExpr = limit.getOffset().getValue();
IScalarEvaluatorFactory offsetFact = (offsetExpr == null) ? null : expressionRuntimeProvider
.createEvaluatorFactory(offsetExpr, env, inputSchemas, context);
- RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
+ RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema,
+ context);
StreamLimitRuntimeFactory runtime = new StreamLimitRuntimeFactory(maxObjectsFact, offsetFact, null,
context.getBinaryIntegerInspectorFactory());
builder.contributeMicroOperator(limit, runtime, recDesc);
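
StreamLimitPOperator above no longer stores a "global" flag; whether the limit is global is now read off the operator's execution mode, and an unpartitioned limit delivers UNPARTITIONED partitioning while keeping the child's local properties. A compact sketch of that decision, with strings standing in for the property vectors (illustrative only):

    public class LimitPropertiesSketch {
        // Execution mode, not a stored constructor flag, decides the
        // delivered partitioning; local properties pass through either way.
        static String deliveredPartitioning(boolean unpartitioned, String childPartitioning) {
            return unpartitioned ? "UNPARTITIONED" : childPartitioning;
        }

        public static void main(String[] args) {
            System.out.println(deliveredPartitioning(true, "HASH($1)"));  // UNPARTITIONED
            System.out.println(deliveredPartitioning(false, "HASH($1)")); // HASH($1)
        }
    }
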
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java
index 54c0505..7861dc0 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java
@@ -31,6 +31,8 @@
public class StreamProjectPOperator extends AbstractPropagatePropertiesForUsedVariablesPOperator {
+ private boolean flushFramesRapidly;
+
@Override
public PhysicalOperatorTag getOperatorTag() {
return PhysicalOperatorTag.STREAM_PROJECT;
@@ -61,8 +63,9 @@
}
projectionList[i++] = pos;
}
- StreamProjectRuntimeFactory runtime = new StreamProjectRuntimeFactory(projectionList);
- RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
+ StreamProjectRuntimeFactory runtime = new StreamProjectRuntimeFactory(projectionList, flushFramesRapidly);
+ RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema,
+ context);
builder.contributeMicroOperator(project, runtime, recDesc);
ILogicalOperator src = project.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, project, 0);
@@ -74,4 +77,8 @@
computeDeliveredPropertiesForUsedVariables(p, p.getVariables());
}
+ public void setRapidFrameFlush(boolean flushFramesRapidly) {
+ this.flushFramesRapidly = flushFramesRapidly;
+ }
+
}
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
index fc0c433..4b7597c 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
@@ -28,7 +28,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -256,13 +255,6 @@
}
@Override
- public String visitDieOperator(DieOperator op, Integer indent) {
- StringBuilder buffer = new StringBuilder();
- addIndent(buffer, indent).append("die after " + op.getAfterObjects().getValue());
- return buffer.toString();
- }
-
- @Override
public String visitExchangeOperator(ExchangeOperator op, Integer indent) {
StringBuilder buffer = new StringBuilder();
addIndent(buffer, indent).append("exchange ");
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionReader.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/ResultSetDomain.java
similarity index 68%
rename from hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionReader.java
rename to algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/ResultSetDomain.java
index 8f5ed64..ca15346 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionReader.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/properties/ResultSetDomain.java
@@ -3,19 +3,25 @@
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.hyracks.api.dataset;
+package edu.uci.ics.hyracks.algebricks.core.algebra.properties;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+public class ResultSetDomain implements INodeDomain {
+ @Override
+ public boolean sameAs(INodeDomain domain) {
+ return true;
+ }
-public interface IDatasetPartitionReader {
- public void writeTo(IFrameWriter writer);
+ @Override
+ public Integer cardinality() {
+ return 0;
+ }
}
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java
index 47a979c..35b36dc 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java
@@ -27,6 +27,7 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
+import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
@@ -96,14 +97,21 @@
break;
}
default: {
+ boolean forceUnpartitioned = false;
if (op.getOperatorTag() == LogicalOperatorTag.LIMIT) {
LimitOperator opLim = (LimitOperator) op;
if (opLim.isTopmostLimitOp()) {
- if (opLim.getExecutionMode() != AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) {
- opLim.setExecutionMode(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
- change = true;
- }
- break;
+ opLim.setExecutionMode(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
+ change = true;
+ forceUnpartitioned = true;
+ }
+ }
+ if (op.getOperatorTag() == LogicalOperatorTag.AGGREGATE) {
+ AggregateOperator aggOp = (AggregateOperator) op;
+ if (aggOp.isGlobal()) {
+ op.setExecutionMode(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
+ change = true;
+ forceUnpartitioned = true;
}
}
@@ -112,6 +120,8 @@
AbstractLogicalOperator inputOp = (AbstractLogicalOperator) i.getValue();
switch (inputOp.getExecutionMode()) {
case PARTITIONED: {
+ if (forceUnpartitioned)
+ break;
op.setExecutionMode(AbstractLogicalOperator.ExecutionMode.PARTITIONED);
change = true;
exit = true;
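
The control-flow change above pins a topmost LIMIT or a global AGGREGATE to UNPARTITIONED, and the new forceUnpartitioned guard keeps a PARTITIONED input from flipping the mode back. A compact, self-contained rendering of that rule (illustrative names, not the Hyracks API):

    public class ForceUnpartitionedSketch {
        enum Mode { UNPARTITIONED, PARTITIONED }

        static Mode resolve(boolean forceUnpartitioned, Mode... inputModes) {
            if (forceUnpartitioned) {
                return Mode.UNPARTITIONED; // topmost limit / global aggregate wins
            }
            for (Mode m : inputModes) {
                if (m == Mode.PARTITIONED) {
                    return Mode.PARTITIONED; // any partitioned input partitions the op
                }
            }
            return Mode.UNPARTITIONED;
        }

        public static void main(String[] args) {
            System.out.println(resolve(true, Mode.PARTITIONED));  // UNPARTITIONED
            System.out.println(resolve(false, Mode.PARTITIONED)); // PARTITIONED
        }
    }
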
diff --git a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java
index 23dac2a..32b049e 100644
--- a/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java
+++ b/algebricks/algebricks-core/src/main/java/edu/uci/ics/hyracks/algebricks/core/algebra/visitors/ILogicalOperatorVisitor.java
@@ -18,7 +18,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DieOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistributeResultOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
@@ -106,5 +105,4 @@
public R visitIndexInsertDeleteOperator(IndexInsertDeleteOperator op, T tag) throws AlgebricksException;
- public R visitDieOperator(DieOperator op, T arg) throws AlgebricksException;
}
diff --git a/algebricks/algebricks-data/pom.xml b/algebricks/algebricks-data/pom.xml
index 1b6646f..1ca1557 100644
--- a/algebricks/algebricks-data/pom.xml
+++ b/algebricks/algebricks-data/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,12 +27,12 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/algebricks/algebricks-examples/piglet-example/pom.xml b/algebricks/algebricks-examples/piglet-example/pom.xml
index 7294979..254d016 100644
--- a/algebricks/algebricks-examples/piglet-example/pom.xml
+++ b/algebricks/algebricks-examples/piglet-example/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-examples</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -48,6 +48,24 @@
</includes>
</configuration>
</plugin>
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>build-helper-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>add-source</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>add-source</goal>
+ </goals>
+ <configuration>
+ <sources>
+ <source>${project.build.directory}/generated-sources/javacc/</source>
+ </sources>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
</plugins>
<pluginManagement>
<plugins>
@@ -89,7 +107,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-compiler</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>junit</groupId>
diff --git a/algebricks/algebricks-examples/pom.xml b/algebricks/algebricks-examples/pom.xml
index b19150c..a02379d 100644
--- a/algebricks/algebricks-examples/pom.xml
+++ b/algebricks/algebricks-examples/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<modules>
diff --git a/algebricks/algebricks-rewriter/pom.xml b/algebricks/algebricks-rewriter/pom.xml
index 172d7b8..9fd5aac 100644
--- a/algebricks/algebricks-rewriter/pom.xml
+++ b/algebricks/algebricks-rewriter/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,7 +27,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-core</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java
index 08271c1..013ddda 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java
@@ -16,12 +16,10 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
@@ -110,16 +108,10 @@
} else {
// The local aggregate operator is fed by the input of the original aggregate operator.
pushedAgg.getInputs().add(new MutableObject<ILogicalOperator>(initAgg.getInputs().get(0).getValue()));
- // Set the partitioning variable in the local agg to ensure it is not projected away.
- context.computeAndSetTypeEnvironmentForOperator(pushedAgg);
- LogicalVariable trueVar = context.newVar();
// Reintroduce assign op for the global agg partitioning var.
- AssignOperator trueAssignOp = new AssignOperator(trueVar, new MutableObject<ILogicalExpression>(
- ConstantExpression.TRUE));
- trueAssignOp.getInputs().add(new MutableObject<ILogicalOperator>(pushedAgg));
- context.computeAndSetTypeEnvironmentForOperator(trueAssignOp);
- initAgg.setPartitioningVariable(trueVar);
- initAgg.getInputs().get(0).setValue(trueAssignOp);
+ initAgg.getInputs().get(0).setValue(pushedAgg);
+ pushedAgg.setGlobal(false);
+ context.computeAndSetTypeEnvironmentForOperator(pushedAgg);
}
return new Pair<Boolean, Mutable<ILogicalOperator>>(true, new MutableObject<ILogicalOperator>(pushedAgg));
} else {
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
index fa5000e..06172d9 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
@@ -55,6 +55,11 @@
return false;
}
+ // stop rewriting if the operator originates from a nested tuple source
+ if (insideSubplan(opRef)) {
+ return false;
+ }
+
// We may pull selects above the join we create in order to eliminate possible dependencies between
// the outer and inner input plans of the join.
List<ILogicalOperator> topSelects = new ArrayList<ILogicalOperator>();
@@ -286,4 +291,24 @@
return findPlanPartition((AbstractLogicalOperator) op.getInputs().get(0).getValue(), innerUsedVars,
outerUsedVars, innerOps, outerOps, topSelects, belowSecondUnnest);
}
+
+ /**
+ * Checks whether the operator is inside a sub-plan, i.e., whether its subtree
+ * contains a nested tuple source.
+ *
+ * @param nestedRootRef
+ * @return true if it is; false otherwise.
+ */
+ private boolean insideSubplan(Mutable<ILogicalOperator> nestedRootRef) {
+ AbstractLogicalOperator nestedRoot = (AbstractLogicalOperator) nestedRootRef.getValue();
+ if (nestedRoot.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
+ return true;
+ }
+ List<Mutable<ILogicalOperator>> inputs = nestedRoot.getInputs();
+ for (Mutable<ILogicalOperator> input : inputs) {
+ if (insideSubplan(input)) {
+ return true;
+ }
+ }
+ return false;
+ }
}
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/EnforceOrderByAfterSubplan.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/EnforceOrderByAfterSubplan.java
index 3260ca0..00670fa 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/EnforceOrderByAfterSubplan.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/EnforceOrderByAfterSubplan.java
@@ -45,12 +45,18 @@
public class EnforceOrderByAfterSubplan implements IAlgebraicRewriteRule {
/** a set of order-breaking operators */
private final Set<LogicalOperatorTag> orderBreakingOps = new HashSet<LogicalOperatorTag>();
+ /** a set of order-sensitive operators */
+ private final Set<LogicalOperatorTag> orderSensitiveOps = new HashSet<LogicalOperatorTag>();
public EnforceOrderByAfterSubplan() {
/** add operators that break the ordering */
orderBreakingOps.add(LogicalOperatorTag.INNERJOIN);
orderBreakingOps.add(LogicalOperatorTag.LEFTOUTERJOIN);
orderBreakingOps.add(LogicalOperatorTag.UNIONALL);
+ orderBreakingOps.add(LogicalOperatorTag.AGGREGATE);
+
+ /** add operators that are sensitive to the ordering */
+ orderSensitiveOps.add(LogicalOperatorTag.LIMIT);
}
@Override
@@ -89,19 +95,25 @@
* duplicate them on-top-of the subplan operator
*/
boolean foundTarget = true;
- AbstractLogicalOperator child = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
+ boolean orderSensitive = false;
+ Mutable<ILogicalOperator> childRef = op.getInputs().get(0);
+ AbstractLogicalOperator child = (AbstractLogicalOperator) childRef.getValue();
while (child.getOperatorTag() != LogicalOperatorTag.ORDER) {
context.addToDontApplySet(this, child);
if (orderBreakingOps.contains(child.getOperatorTag())) {
foundTarget = false;
break;
}
+ if (orderSensitiveOps.contains(child.getOperatorTag())) {
+ orderSensitive = true;
+ }
List<Mutable<ILogicalOperator>> childInputs = child.getInputs();
if (childInputs == null || childInputs.size() > 2 || childInputs.size() < 1) {
foundTarget = false;
break;
} else {
- child = (AbstractLogicalOperator) childInputs.get(0).getValue();
+ childRef = childInputs.get(0);
+ child = (AbstractLogicalOperator) childRef.getValue();
}
}
/** the target order-by operator has not been found. */
@@ -109,7 +121,7 @@
return false;
}
- /** duplicate the order-by operator and insert on-top-of the subplan operator */
+ /** copy the original order-by operator and insert it on top of the subplan operator */
context.addToDontApplySet(this, child);
OrderOperator sourceOrderOp = (OrderOperator) child;
List<Pair<IOrder, Mutable<ILogicalExpression>>> orderExprs = deepCopyOrderAndExpression(sourceOrderOp
@@ -119,6 +131,11 @@
inputs.set(i, new MutableObject<ILogicalOperator>(newOrderOp));
newOrderOp.getInputs().add(inputOpRef);
context.computeAndSetTypeEnvironmentForOperator(newOrderOp);
+
+ if (!orderSensitive) {
+ /** remove the original order-by */
+ childRef.setValue(sourceOrderOp.getInputs().get(0).getValue());
+ }
changed = true;
}
return changed;
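With this change the rule copies the ORDER operator to sit above the subplan and, unless an order-sensitive operator such as LIMIT was seen on the way down, splices the original ORDER out of the child chain via childRef.setValue(...). A minimal sketch of that splice, using stand-ins for the commons-lang Mutable holder and the operator class:

    import java.util.ArrayList;
    import java.util.List;

    // Stand-ins for Mutable<ILogicalOperator> and AbstractLogicalOperator.
    final class Ref<T> {
        private T value;
        Ref(T value) { this.value = value; }
        T get() { return value; }
        void set(T v) { value = v; }
    }

    final class Op {
        final String tag;
        final List<Ref<Op>> inputs = new ArrayList<>();
        Op(String tag) { this.tag = tag; }
    }

    final class SpliceSketch {
        // Mirrors childRef.setValue(sourceOrderOp.getInputs().get(0).getValue()):
        // the parent's input slot is re-pointed at the ORDER node's own input,
        // bypassing (and thus removing) the original ORDER.
        static void removeOrder(Ref<Op> childRef) {
            Op order = childRef.get(); // assumed to be the ORDER operator
            childRef.set(order.inputs.get(0).get());
        }
    }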
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
index f017e0f..9becf6e 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
@@ -48,57 +48,46 @@
/**
* Factors out common sub-expressions by assigning them to variables, and replacing the common sub-expressions with references to those variables.
- *
* Preconditions/Assumptions:
* Assumes no projects are in the plan. This rule ignores variable reference expressions and constants (other rules deal with those separately).
- *
* Postconditions/Examples:
* Plan with extracted sub-expressions. Generates one assign operator per extracted expression.
- *
* Example 1 - Simple Arithmetic Example (simplified)
- *
* Before plan:
* assign [$$1] <- [5 + 6 - 10]
- * assign [$$0] <- [5 + 6 + 30]
- *
+ * assign [$$0] <- [5 + 6 + 30]
* After plan:
* assign [$$1] <- [$$5 - 10]
- * assign [$$0] <- [$$5 + 30]
- * assign [$$5] <- [5 + 6]
- *
+ * assign [$$0] <- [$$5 + 30]
+ * assign [$$5] <- [5 + 6]
* Example 2 - Cleaning up 'Distinct By' (simplified)
- *
* Before plan: (notice how $$0 is not live after the distinct)
* assign [$$3] <- [field-access($$0, 1)]
- * distinct ([%0->$$5])
- * assign [$$5] <- [field-access($$0, 1)]
- * unnest $$0 <- [scan-dataset]
- *
+ * distinct ([%0->$$5])
+ * assign [$$5] <- [field-access($$0, 1)]
+ * unnest $$0 <- [scan-dataset]
* After plan: (notice how the issue of $$0 is fixed)
* assign [$$3] <- [$$5]
- * distinct ([$$5])
- * assign [$$5] <- [field-access($$0, 1)]
- * unnest $$0 <- [scan-dataset]
- *
+ * distinct ([$$5])
+ * assign [$$5] <- [field-access($$0, 1)]
+ * unnest $$0 <- [scan-dataset]
* Example 3 - Pulling Common Expressions Above Joins (simplified)
- *
* Before plan:
* assign [$$9] <- funcZ(funcY($$8))
- * join (funcX(funcY($$8)))
- *
+ * join (funcX(funcY($$8)))
* After plan:
* assign [$$9] <- funcZ($$10))
- * select (funcX($$10))
- * assign [$$10] <- [funcY($$8)]
- * join (TRUE)
+ * select (funcX($$10))
+ * assign [$$10] <- [funcY($$8)]
+ * join (TRUE)
*/
public class ExtractCommonExpressionsRule implements IAlgebraicRewriteRule {
private final List<ILogicalExpression> originalAssignExprs = new ArrayList<ILogicalExpression>();
-
+
private final CommonExpressionSubstitutionVisitor substVisitor = new CommonExpressionSubstitutionVisitor();
private final Map<ILogicalExpression, ExprEquivalenceClass> exprEqClassMap = new HashMap<ILogicalExpression, ExprEquivalenceClass>();
-
+
// Set of operators for which common subexpression elimination should not be performed.
private static final Set<LogicalOperatorTag> ignoreOps = new HashSet<LogicalOperatorTag>();
static {
@@ -109,9 +98,10 @@
ignoreOps.add(LogicalOperatorTag.AGGREGATE);
ignoreOps.add(LogicalOperatorTag.RUNNINGAGGREGATE);
}
-
+
@Override
- public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -126,7 +116,8 @@
return modified;
}
- private void updateEquivalenceClassMap(LogicalVariable lhs, Mutable<ILogicalExpression> rhsExprRef, ILogicalExpression rhsExpr, ILogicalOperator op) {
+ private void updateEquivalenceClassMap(LogicalVariable lhs, Mutable<ILogicalExpression> rhsExprRef,
+ ILogicalExpression rhsExpr, ILogicalOperator op) {
ExprEquivalenceClass exprEqClass = exprEqClassMap.get(rhsExpr);
if (exprEqClass == null) {
exprEqClass = new ExprEquivalenceClass(op, rhsExprRef);
@@ -141,7 +132,7 @@
if (context.checkIfInDontApplySet(this, opRef.getValue())) {
return false;
}
-
+
boolean modified = false;
// Recurse into children.
for (Mutable<ILogicalOperator> inputOpRef : op.getInputs()) {
@@ -149,7 +140,7 @@
modified = true;
}
}
-
+
// TODO: Deal with replicate properly. Currently, we just clear the expr equivalence map, since we want to avoid incorrect expression replacement
// (the resulting new variables should be assigned live below a replicate).
if (op.getOperatorTag() == LogicalOperatorTag.REPLICATE) {
@@ -160,7 +151,7 @@
if (ignoreOps.contains(op.getOperatorTag())) {
return modified;
}
-
+
// Remember a copy of the original assign expressions, so we can add them to the equivalence class map
// after replacing expressions within the assign operator itself.
if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
@@ -173,13 +164,13 @@
originalAssignExprs.add(expr.cloneExpression());
}
}
-
+
// Perform common subexpression elimination.
substVisitor.setOperator(op);
if (op.acceptExpressionTransform(substVisitor)) {
modified = true;
}
-
+
// Update equivalence class map.
if (op.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
AssignOperator assignOp = (AssignOperator) op;
@@ -194,7 +185,7 @@
// Update equivalence class map.
LogicalVariable lhs = assignOp.getVariables().get(i);
updateEquivalenceClassMap(lhs, exprRef, exprRef.getValue(), op);
-
+
// Update equivalence class map with original assign expression.
updateEquivalenceClassMap(lhs, exprRef, originalAssignExprs.get(i), op);
}
@@ -225,35 +216,30 @@
}
private class CommonExpressionSubstitutionVisitor implements ILogicalExpressionReferenceTransform {
-
- private final Set<LogicalVariable> liveVars = new HashSet<LogicalVariable>();
- private final List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
+
private IOptimizationContext context;
- private ILogicalOperator op;
-
+ private ILogicalOperator op;
+
public void setContext(IOptimizationContext context) {
this.context = context;
}
-
+
public void setOperator(ILogicalOperator op) throws AlgebricksException {
this.op = op;
- liveVars.clear();
- usedVars.clear();
}
-
+
@Override
public boolean transform(Mutable<ILogicalExpression> exprRef) throws AlgebricksException {
- if (liveVars.isEmpty() && usedVars.isEmpty()) {
- VariableUtilities.getLiveVariables(op, liveVars);
- VariableUtilities.getUsedVariables(op, usedVars);
- }
-
AbstractLogicalExpression expr = (AbstractLogicalExpression) exprRef.getValue();
boolean modified = false;
ExprEquivalenceClass exprEqClass = exprEqClassMap.get(expr);
if (exprEqClass != null) {
// Replace common subexpression with existing variable.
if (exprEqClass.variableIsSet()) {
+ Set<LogicalVariable> liveVars = new HashSet<LogicalVariable>();
+ List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
+ VariableUtilities.getLiveVariables(op, liveVars);
+ VariableUtilities.getUsedVariables(op, usedVars);
// Check if the replacing variable is live at this op.
// However, if the op is already using variables that are not live, then a replacement may enable fixing the plan.
// This behavior is necessary to, e.g., properly deal with distinct by.
@@ -266,9 +252,15 @@
}
} else {
if (assignCommonExpression(exprEqClass, expr)) {
- exprRef.setValue(new VariableReferenceExpression(exprEqClass.getVariable()));
- // Do not descend into children since this expr has been completely replaced.
- return true;
+ // re-obtain the live variables, since assignCommonExpression (invoked in the condition above) may have rewritten the plan
+ Set<LogicalVariable> liveVars = new HashSet<LogicalVariable>();
+ VariableUtilities.getLiveVariables(op, liveVars);
+ //rewrite only when the variable is live
+ if (liveVars.contains(exprEqClass.getVariable())) {
+ exprRef.setValue(new VariableReferenceExpression(exprEqClass.getVariable()));
+ // Do not descend into children since this expr has been completely replaced.
+ return true;
+ }
}
}
} else {
@@ -278,7 +270,7 @@
exprEqClassMap.put(expr, exprEqClass);
}
}
-
+
// Descend into function arguments.
if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
@@ -290,17 +282,19 @@
}
return modified;
}
-
- private boolean assignCommonExpression(ExprEquivalenceClass exprEqClass, ILogicalExpression expr) throws AlgebricksException {
+
+ private boolean assignCommonExpression(ExprEquivalenceClass exprEqClass, ILogicalExpression expr)
+ throws AlgebricksException {
AbstractLogicalOperator firstOp = (AbstractLogicalOperator) exprEqClass.getFirstOperator();
Mutable<ILogicalExpression> firstExprRef = exprEqClass.getFirstExpression();
- if (firstOp.getOperatorTag() == LogicalOperatorTag.INNERJOIN || firstOp.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
+ if (firstOp.getOperatorTag() == LogicalOperatorTag.INNERJOIN
+ || firstOp.getOperatorTag() == LogicalOperatorTag.LEFTOUTERJOIN) {
// Do not extract common expressions from within the same join operator.
if (firstOp == op) {
return false;
}
AbstractBinaryJoinOperator joinOp = (AbstractBinaryJoinOperator) firstOp;
- Mutable<ILogicalExpression> joinCond = joinOp.getCondition();
+ Mutable<ILogicalExpression> joinCond = joinOp.getCondition();
ILogicalExpression enclosingExpr = getEnclosingExpression(joinCond, firstExprRef.getValue());
if (enclosingExpr == null) {
// No viable enclosing expression that we can pull out from the join.
@@ -312,12 +306,13 @@
op.getInputs().get(0).setValue(selectOp);
// Set firstOp to be the select below op, since we want to assign the common subexpr there.
firstOp = (AbstractLogicalOperator) selectOp;
- } else if (firstOp.getInputs().size() > 1) {
+ } else if (firstOp.getInputs().size() > 1) {
// Bail for any non-join operator with multiple inputs.
return false;
- }
+ }
LogicalVariable newVar = context.newVar();
- AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(firstExprRef.getValue().cloneExpression()));
+ AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(firstExprRef
+ .getValue().cloneExpression()));
// Place assign below firstOp.
newAssign.getInputs().add(new MutableObject<ILogicalOperator>(firstOp.getInputs().get(0).getValue()));
newAssign.setExecutionMode(firstOp.getExecutionMode());
@@ -330,7 +325,8 @@
return true;
}
- private ILogicalExpression getEnclosingExpression(Mutable<ILogicalExpression> conditionExprRef, ILogicalExpression commonSubExpr) {
+ private ILogicalExpression getEnclosingExpression(Mutable<ILogicalExpression> conditionExprRef,
+ ILogicalExpression commonSubExpr) {
ILogicalExpression conditionExpr = conditionExprRef.getValue();
if (conditionExpr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
return null;
@@ -371,7 +367,7 @@
return enclosingBoolExpr;
}
}
-
+
private boolean containsExpr(ILogicalExpression expr, ILogicalExpression searchExpr) {
if (expr == searchExpr) {
return true;
@@ -387,7 +383,7 @@
}
return false;
}
-
+
private boolean isEqJoinCondition(ILogicalExpression expr) {
AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
if (funcExpr.getFunctionIdentifier().equals(AlgebricksBuiltinFunctions.EQ)) {
@@ -400,38 +396,38 @@
}
return false;
}
-
+
private final class ExprEquivalenceClass {
// First operator in which expression is used.
private final ILogicalOperator firstOp;
-
+
// Reference to expression in first op.
private final Mutable<ILogicalExpression> firstExprRef;
-
+
// Variable that this expression has been assigned to.
private LogicalVariable var;
-
+
public ExprEquivalenceClass(ILogicalOperator firstOp, Mutable<ILogicalExpression> firstExprRef) {
this.firstOp = firstOp;
this.firstExprRef = firstExprRef;
}
-
+
public ILogicalOperator getFirstOperator() {
return firstOp;
}
-
+
public Mutable<ILogicalExpression> getFirstExpression() {
return firstExprRef;
}
-
+
public void setVariable(LogicalVariable var) {
this.var = var;
}
-
+
public LogicalVariable getVariable() {
return var;
}
-
+
public boolean variableIsSet() {
return var != null;
}
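The reworked visitor no longer caches live/used variables across calls; it recomputes liveness on demand and, after assignCommonExpression mutates the plan, substitutes the common sub-expression only if its variable is actually live at the current operator. A simplified sketch of that guard (string-based stand-ins for expressions and variables):

    import java.util.Set;

    final class LivenessGuardSketch {
        // Mirrors the added check: keep the original expression unless the
        // variable introduced for the common sub-expression is live here.
        static String substitute(String originalExpr, String commonVar, Set<String> liveVarsAfterRewrite) {
            return liveVarsAfterRewrite.contains(commonVar) ? "$$" + commonVar : originalExpr;
        }
    }

For example, substitute("funcY($$8)", "10", liveVars) yields "$$10" only when variable 10 is live at the operator being rewritten.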
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceAggregateCombinerRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceAggregateCombinerRule.java
index c3d935c..c75db57 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceAggregateCombinerRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceAggregateCombinerRule.java
@@ -29,7 +29,7 @@
return false;
}
AggregateOperator aggOp = (AggregateOperator) op;
- if (aggOp.getExecutionMode() != ExecutionMode.PARTITIONED || aggOp.getPartitioningVariable() == null) {
+ if (!aggOp.isGlobal() || aggOp.getExecutionMode() == ExecutionMode.LOCAL) {
return false;
}
Map<AggregateFunctionCallExpression, SimilarAggregatesInfo> toReplaceMap = new HashMap<AggregateFunctionCallExpression, SimilarAggregatesInfo>();
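The applicability test now keys off the aggregate's global flag rather than a partitioning variable: only a global aggregate that is not already running in LOCAL mode is a candidate for combiner introduction. A tiny sketch of the guard:

    final class CombinerGuardSketch {
        enum ExecutionMode { UNPARTITIONED, PARTITIONED, LOCAL }

        // Mirrors the rewritten condition above (the rule's early return is
        // the negation of this predicate).
        static boolean eligible(boolean isGlobal, ExecutionMode mode) {
            return isGlobal && mode != ExecutionMode.LOCAL;
        }
    }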
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByForSubplanRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByForSubplanRule.java
index 18c483e..489167f 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByForSubplanRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/IntroduceGroupByForSubplanRule.java
@@ -143,8 +143,10 @@
Set<LogicalVariable> pkVars = computeGbyVars(outerNts, free, context);
if (pkVars == null || pkVars.size() < 1) {
- // could not group only by primary keys
- return false;
+ // there is no non-trivial primary key; fall back to all live variables as the group-by keys
+ ILogicalOperator subplanInput = subplan.getInputs().get(0).getValue();
+ pkVars = new HashSet<LogicalVariable>();
+ VariableUtilities.getLiveVariables(subplanInput, pkVars);
}
AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Found FD for introducing group-by: " + pkVars);
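Instead of bailing out when no non-trivial primary key exists, the rule now derives the group-by keys from every variable live below the subplan. A simplified sketch of the fallback:

    import java.util.HashSet;
    import java.util.Set;

    final class GroupByKeysSketch {
        // Mirrors the new branch: prefer the derived key set, otherwise group
        // by all variables live at the subplan's input.
        static Set<String> groupByKeys(Set<String> derivedPkVars, Set<String> liveBelowSubplan) {
            if (derivedPkVars != null && !derivedPkVars.isEmpty()) {
                return derivedPkVars;
            }
            return new HashSet<>(liveBelowSubplan);
        }
    }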
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushDieUpRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushDieUpRule.java
deleted file mode 100644
index c4dd78d..0000000
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushDieUpRule.java
+++ /dev/null
@@ -1,52 +0,0 @@
-package edu.uci.ics.hyracks.algebricks.rewriter.rules;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.IOptimizationContext;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
-
-public class PushDieUpRule implements IAlgebraicRewriteRule {
-
- @Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
- return false;
- }
-
- @Override
- public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
- throws AlgebricksException {
- AbstractLogicalOperator op0 = (AbstractLogicalOperator) opRef.getValue();
- if (op0.getInputs().size() == 0)
- return false;
- AbstractLogicalOperator op1 = (AbstractLogicalOperator) op0.getInputs().get(0).getValue();
-
- if (op1.getInputs().size() == 0)
- return false;
- LogicalOperatorTag tag = op1.getOperatorTag();
- if (tag == LogicalOperatorTag.SINK || tag == LogicalOperatorTag.WRITE
- || tag == LogicalOperatorTag.INSERT_DELETE || tag == LogicalOperatorTag.WRITE_RESULT)
- return false;
-
- AbstractLogicalOperator op2 = (AbstractLogicalOperator) op1.getInputs().get(0).getValue();
- if (op2.getOperatorTag() == LogicalOperatorTag.DIE) {
- op0.getInputs().get(0).setValue(op2);
- op1.getInputs().clear();
- for (Mutable<ILogicalOperator> ref : op2.getInputs())
- op1.getInputs().add(ref);
- op2.getInputs().clear();
- op2.getInputs().add(new MutableObject<ILogicalOperator>(op1));
-
- context.computeAndSetTypeEnvironmentForOperator(op0);
- context.computeAndSetTypeEnvironmentForOperator(op1);
- context.computeAndSetTypeEnvironmentForOperator(op2);
- return true;
- } else {
- return false;
- }
- }
-}
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushLimitDownRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushLimitDownRule.java
index acfdbd3..e5f60bc 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushLimitDownRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/PushLimitDownRule.java
@@ -106,7 +106,7 @@
opLim.getMaxObjects(), opLim.getOffset());
clone2 = new LimitOperator(maxPlusOffset, false);
}
- clone2.setPhysicalOperator(new StreamLimitPOperator(false));
+ clone2.setPhysicalOperator(new StreamLimitPOperator());
clone2.getInputs().add(new MutableObject<ILogicalOperator>(op2));
clone2.setExecutionMode(op2.getExecutionMode());
clone2.recomputeSchema();
diff --git a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
index 60a4fbb..9c8ad46 100644
--- a/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
+++ b/algebricks/algebricks-rewriter/src/main/java/edu/uci/ics/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
@@ -21,7 +21,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IDataSource;
import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator.ExecutionMode;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AbstractOperatorWithNestedPlans;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
@@ -53,7 +52,6 @@
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.SinkPOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.SinkWritePOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.StableSortPOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.StreamDiePOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.StreamLimitPOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.StreamProjectPOperator;
import edu.uci.ics.hyracks.algebricks.core.algebra.operators.physical.StreamSelectPOperator;
@@ -173,9 +171,7 @@
break;
}
case LIMIT: {
- LimitOperator opLim = (LimitOperator) op;
- op.setPhysicalOperator(new StreamLimitPOperator(opLim.isTopmostLimitOp()
- && opLim.getExecutionMode() == ExecutionMode.PARTITIONED));
+ op.setPhysicalOperator(new StreamLimitPOperator());
break;
}
case NESTEDTUPLESOURCE: {
@@ -280,10 +276,6 @@
op.setPhysicalOperator(new SinkPOperator());
break;
}
- case DIE: {
- op.setPhysicalOperator(new StreamDiePOperator());
- break;
- }
}
}
if (op.hasNestedPlans()) {
diff --git a/algebricks/algebricks-runtime/pom.xml b/algebricks/algebricks-runtime/pom.xml
index bdb8c0f..a2ad833 100644
--- a/algebricks/algebricks-runtime/pom.xml
+++ b/algebricks/algebricks-runtime/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,27 +27,27 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-rtree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-data</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/SimpleAlgebricksAccumulatingAggregatorFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/SimpleAlgebricksAccumulatingAggregatorFactory.java
index 83925cc..76b6fcf 100644
--- a/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/SimpleAlgebricksAccumulatingAggregatorFactory.java
+++ b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/aggreg/SimpleAlgebricksAccumulatingAggregatorFactory.java
@@ -34,8 +34,7 @@
private static final long serialVersionUID = 1L;
private IAggregateEvaluatorFactory[] aggFactories;
- public SimpleAlgebricksAccumulatingAggregatorFactory(IAggregateEvaluatorFactory[] aggFactories, int[] keys,
- int[] fdColumns) {
+ public SimpleAlgebricksAccumulatingAggregatorFactory(IAggregateEvaluatorFactory[] aggFactories, int[] keys) {
this.aggFactories = aggFactories;
}
diff --git a/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputOneFramePushRuntime.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputOneFramePushRuntime.java
index cd9931b..79e964b 100644
--- a/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputOneFramePushRuntime.java
+++ b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/base/AbstractOneInputOneOutputOneFramePushRuntime.java
@@ -54,6 +54,11 @@
}
protected void appendProjectionToFrame(int tIndex, int[] projectionList) throws HyracksDataException {
+ appendProjectionToFrame(tIndex, projectionList, false);
+ }
+
+ protected void appendProjectionToFrame(int tIndex, int[] projectionList, boolean flushFrame)
+ throws HyracksDataException {
if (!appender.appendProjection(tAccess, tIndex, projectionList)) {
FrameUtils.flushFrame(frame, writer);
appender.reset(frame, true);
@@ -61,6 +66,11 @@
throw new IllegalStateException(
"Could not write frame (AbstractOneInputOneOutputOneFramePushRuntime.appendProjectionToFrame).");
}
+ return;
+ }
+ if (flushFrame) {
+ FrameUtils.flushFrame(frame, writer);
+ appender.reset(frame, true);
}
}
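The new appendProjectionToFrame overload lets a caller request an eager flush right after a successful append, instead of waiting for the frame to fill. A sketch of the control flow, with simplified stand-ins for the frame appender and writer:

    final class EagerFlushSketch {
        interface Appender { boolean append(int tupleIndex); void reset(); }
        interface Writer { void flush(); }

        // Mirrors appendProjectionToFrame(t, projectionList, flushFrame): on a
        // failed append, flush the full frame and retry; on a successful append
        // with flushFrame set, push the frame downstream immediately.
        static void appendMaybeFlush(Appender appender, Writer writer, int t, boolean flushFrame) {
            if (!appender.append(t)) {
                writer.flush();
                appender.reset();
                if (!appender.append(t)) {
                    throw new IllegalStateException("tuple does not fit in an empty frame");
                }
                return;
            }
            if (flushFrame) {
                writer.flush();
                appender.reset();
            }
        }
    }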
diff --git a/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamDieRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamDieRuntimeFactory.java
deleted file mode 100644
index 508ce5d..0000000
--- a/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamDieRuntimeFactory.java
+++ /dev/null
@@ -1,100 +0,0 @@
-package edu.uci.ics.hyracks.algebricks.runtime.operators.std;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspector;
-import edu.uci.ics.hyracks.algebricks.data.IBinaryIntegerInspectorFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluator;
-import edu.uci.ics.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime;
-import edu.uci.ics.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputRuntimeFactory;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.data.std.api.IPointable;
-import edu.uci.ics.hyracks.data.std.primitive.VoidPointable;
-
-public class StreamDieRuntimeFactory extends AbstractOneInputOneOutputRuntimeFactory {
-
- private static final long serialVersionUID = 1L;
-
- private IScalarEvaluatorFactory aftterObjectsEvalFactory;
- private IBinaryIntegerInspectorFactory binaryIntegerInspectorFactory;
-
- public StreamDieRuntimeFactory(IScalarEvaluatorFactory maxObjectsEvalFactory, int[] projectionList,
- IBinaryIntegerInspectorFactory binaryIntegerInspectorFactory) {
- super(projectionList);
- this.aftterObjectsEvalFactory = maxObjectsEvalFactory;
- this.binaryIntegerInspectorFactory = binaryIntegerInspectorFactory;
- }
-
- @Override
- public String toString() {
- String s = "stream-die " + aftterObjectsEvalFactory.toString();
- return s;
- }
-
- @Override
- public AbstractOneInputOneOutputOneFramePushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx) {
- final IBinaryIntegerInspector bii = binaryIntegerInspectorFactory.createBinaryIntegerInspector(ctx);
- return new AbstractOneInputOneOutputOneFramePushRuntime() {
- private IPointable p = VoidPointable.FACTORY.createPointable();
- private IScalarEvaluator evalAfterObjects;
- private int toWrite = -1;
-
- @Override
- public void open() throws HyracksDataException {
- if (evalAfterObjects == null) {
- initAccessAppendRef(ctx);
- try {
- evalAfterObjects = aftterObjectsEvalFactory.createScalarEvaluator(ctx);
- } catch (AlgebricksException ae) {
- throw new HyracksDataException(ae);
- }
- }
- writer.open();
- }
-
- @Override
- public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- tAccess.reset(buffer);
- int nTuple = tAccess.getTupleCount();
- if (toWrite < 0) {
- toWrite = evaluateInteger(evalAfterObjects, 0);
- }
- for (int t = 0; t < nTuple; t++) {
- if (toWrite > 0) {
- toWrite--;
- if (projectionList != null) {
- appendProjectionToFrame(t, projectionList);
- } else {
- appendTupleToFrame(t);
- }
- } else {
- throw new HyracksDataException("injected failure");
-// System.out.println("Injected Kill-JVM");
-// System.exit(-1);
- }
- }
- }
-
- @Override
- public void close() throws HyracksDataException {
- super.close();
- }
-
- private int evaluateInteger(IScalarEvaluator eval, int tIdx) throws HyracksDataException {
- tRef.reset(tAccess, tIdx);
- try {
- eval.evaluate(tRef, p);
- } catch (AlgebricksException ae) {
- throw new HyracksDataException(ae);
- }
- int lim = bii.getIntegerValue(p.getByteArray(), p.getStartOffset(), p.getLength());
- return lim;
- }
-
- };
- }
-
-}
diff --git a/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamProjectRuntimeFactory.java b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamProjectRuntimeFactory.java
index 455ad8e..0e6fc15 100644
--- a/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamProjectRuntimeFactory.java
+++ b/algebricks/algebricks-runtime/src/main/java/edu/uci/ics/hyracks/algebricks/runtime/operators/std/StreamProjectRuntimeFactory.java
@@ -26,9 +26,16 @@
public class StreamProjectRuntimeFactory extends AbstractOneInputOneOutputRuntimeFactory {
private static final long serialVersionUID = 1L;
+ private final boolean flushFramesRapidly;
+
+ public StreamProjectRuntimeFactory(int[] projectionList, boolean flushFramesRapidly) {
+ super(projectionList);
+ this.flushFramesRapidly = flushFramesRapidly;
+ }
public StreamProjectRuntimeFactory(int[] projectionList) {
super(projectionList);
+ this.flushFramesRapidly = false;
}
@Override
@@ -57,7 +64,16 @@
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
tAccess.reset(buffer);
int nTuple = tAccess.getTupleCount();
- for (int t = 0; t < nTuple; t++) {
+
+ int t = 0;
+ for (; t < nTuple - 1; t++) {
+ appendProjectionToFrame(t, projectionList);
+ }
+ if (flushFramesRapidly) {
+ // Whenever all the tuples in the incoming frame have been consumed, the project operator
+ // will push its frame to the next operator; i.e., it won't wait until the frame gets full.
+ appendProjectionToFrame(t, projectionList, true);
+ } else {
appendProjectionToFrame(t, projectionList);
}
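StreamProjectRuntimeFactory gains a flushFramesRapidly flag: when set, the last tuple of each incoming frame is appended with the eager-flush overload shown earlier, so the projected frame is pushed downstream as soon as the input frame is consumed. A hypothetical construction (the projection list is illustrative):

    import edu.uci.ics.hyracks.algebricks.runtime.operators.std.StreamProjectRuntimeFactory;

    final class ProjectFactorySketch {
        static void example() {
            int[] projectionList = new int[] { 0, 2 }; // keep fields 0 and 2
            StreamProjectRuntimeFactory lazy = new StreamProjectRuntimeFactory(projectionList);
            StreamProjectRuntimeFactory eager = new StreamProjectRuntimeFactory(projectionList, true);
        }
    }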
diff --git a/algebricks/algebricks-tests/pom.xml b/algebricks/algebricks-tests/pom.xml
index 02b7c45..031dfbe 100644
--- a/algebricks/algebricks-tests/pom.xml
+++ b/algebricks/algebricks-tests/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -85,7 +85,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-compiler</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>junit</groupId>
@@ -96,17 +96,17 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java b/algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
index 3de0966..e3cd7d8 100644
--- a/algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
+++ b/algebricks/algebricks-tests/src/test/java/edu/uci/ics/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
@@ -493,7 +493,7 @@
new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(IntegerPointable.FACTORY) });
IAggregateEvaluatorFactory[] aggFuns = new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() };
IAggregatorDescriptorFactory aggFactory = new SimpleAlgebricksAccumulatingAggregatorFactory(aggFuns,
- new int[] { 3 }, new int[] {});
+ new int[] { 3 });
HashGroupOperatorDescriptor gby = new HashGroupOperatorDescriptor(spec, new int[] { 3 }, tpcf,
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
aggFactory, gbyDesc, 1024);
diff --git a/algebricks/pom.xml b/algebricks/pom.xml
index d7c871a..e1abc89 100644
--- a/algebricks/pom.xml
+++ b/algebricks/pom.xml
@@ -2,7 +2,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<packaging>pom</packaging>
<name>algebricks</name>
diff --git a/hivesterix/hivesterix-common/pom.xml b/hivesterix/hivesterix-common/pom.xml
index bf0ec0a..2dbb788 100644
--- a/hivesterix/hivesterix-common/pom.xml
+++ b/hivesterix/hivesterix-common/pom.xml
@@ -7,7 +7,7 @@
<parent>
<artifactId>hivesterix</artifactId>
<groupId>edu.uci.ics.hyracks</groupId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -30,21 +30,21 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-compiler</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-hdfs-core</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hivesterix/hivesterix-dist/pom.xml b/hivesterix/hivesterix-dist/pom.xml
index aef0320..5f4c46b 100644
--- a/hivesterix/hivesterix-dist/pom.xml
+++ b/hivesterix/hivesterix-dist/pom.xml
@@ -2,13 +2,13 @@
<modelVersion>4.0.0</modelVersion>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix-dist</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<name>hivesterix-dist</name>
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<dependencies>
@@ -276,14 +276,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix-translator</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix-optimizer</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
@@ -297,21 +297,21 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-compiler</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hivesterix/hivesterix-dist/src/main/resources/conf/cluster.properties b/hivesterix/hivesterix-dist/src/main/resources/conf/cluster.properties
index 2d2401a..056cce4 100644
--- a/hivesterix/hivesterix-dist/src/main/resources/conf/cluster.properties
+++ b/hivesterix/hivesterix-dist/src/main/resources/conf/cluster.properties
@@ -4,9 +4,6 @@
#The CC port for Hyracks cluster management
CC_CLUSTERPORT=1099
-#The directory of hyracks binaries
-HYRACKS_HOME=../../../../hyracks
-
#The tmp directory for cc to install jars
CCTMP_DIR=/tmp/t1
@@ -29,9 +26,11 @@
FRAME_SIZE=65536
#CC JAVA_OPTS
-CCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+CCJAVA_OPTS="-Xmx1g -Djava.util.logging.config.file=logging.properties"
+# debug option: CCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
# Yourkit option: -agentpath:/grid/0/dev/vborkar/tools/yjp-10.0.4/bin/linux-x86-64/libyjpagent.so=port=20001"
#NC JAVA_OPTS
-NCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7002,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
-
+NCJAVA_OPTS="-Xmx1g -Djava.util.logging.config.file=logging.properties"
+# debug option: NCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7002,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+# Yourkit option: -agentpath:/grid/0/dev/vborkar/tools/yjp-10.0.4/bin/linux-x86-64/libyjpagent.so=port=20001"
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/getip.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/getip.sh
index 8c9ae76..1b44d09 100755
--- a/hivesterix/hivesterix-dist/src/main/resources/scripts/getip.sh
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/getip.sh
@@ -1,3 +1,21 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
#get the OS
OS_NAME=`uname -a|awk '{print $1}'`
LINUX_OS='Linux'
@@ -6,10 +24,10 @@
then
#Get IP Address
IPADDR=`/sbin/ifconfig eth0 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
- if [ "$IPADDR" = "" ]
+ if [ "$IPADDR" = "" ]
then
- IPADDR=`/sbin/ifconfig em1 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
- fi
+ IPADDR=`/sbin/ifconfig em1 | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
+ fi
if [ "$IPADDR" = "" ]
then
IPADDR=`/sbin/ifconfig lo | grep "inet " | awk '{print $2}' | cut -f 2 -d ':'`
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/hive b/hivesterix/hivesterix-dist/src/main/resources/scripts/hive
index f98f340..38a9e33 100755
--- a/hivesterix/hivesterix-dist/src/main/resources/scripts/hive
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/hive
@@ -81,7 +81,7 @@
exit 3;
fi
-CLASSPATH=${CLASSPATH}:${HIVE_LIB}/a-hive-path.jar
+CLASSPATH=${CLASSPATH}:${HIVE_LIB}/a-hive-patch.jar
for f in ${HIVE_LIB}/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/hivesterixcc b/hivesterix/hivesterix-dist/src/main/resources/scripts/hivesterixcc
new file mode 100755
index 0000000..d2adffd
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/hivesterixcc
@@ -0,0 +1,122 @@
+#!/bin/sh
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
+# resolve links - $0 may be a softlink
+PRG="$0"
+
+while [ -h "$PRG" ]; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`/"$link"
+ fi
+done
+
+PRGDIR=`dirname "$PRG"`
+BASEDIR=`cd "$PRGDIR/.." >/dev/null; pwd`
+
+
+
+# OS specific support. $var _must_ be set to either true or false.
+cygwin=false;
+darwin=false;
+case "`uname`" in
+ CYGWIN*) cygwin=true ;;
+ Darwin*) darwin=true
+ if [ -z "$JAVA_VERSION" ] ; then
+ JAVA_VERSION="CurrentJDK"
+ else
+ echo "Using Java version: $JAVA_VERSION"
+ fi
+ if [ -z "$JAVA_HOME" ] ; then
+ JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home
+ fi
+ ;;
+esac
+
+if [ -z "$JAVA_HOME" ] ; then
+ if [ -r /etc/gentoo-release ] ; then
+ JAVA_HOME=`java-config --jre-home`
+ fi
+fi
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched
+if $cygwin ; then
+ [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+ [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
+fi
+
+# If a specific java binary isn't specified search for the standard 'java' binary
+if [ -z "$JAVACMD" ] ; then
+ if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ else
+ JAVACMD=`which java`
+ fi
+fi
+
+if [ ! -x "$JAVACMD" ] ; then
+ echo "Error: JAVA_HOME is not defined correctly." 1>&2
+ echo " We cannot execute $JAVACMD" 1>&2
+ exit 1
+fi
+
+if [ -z "$REPO" ]
+then
+ REPO="$BASEDIR"/lib
+fi
+
+CLASSPATH=$CLASSPATH_PREFIX:"$HADOOP_HOME"/conf:/etc/hadoop/conf:"$BASEDIR"/etc:$1
+
+#add the desired hive patch
+CLASSPATH=${CLASSPATH}:${REPO}/a-hive-patch.jar
+
+#add the desired jetty jar
+for f in ${REPO}/servlet-api-3*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+
+for f in ${REPO}/*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin; then
+ [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
+ [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
+ [ -n "$HOME" ] && HOME=`cygpath --path --windows "$HOME"`
+ [ -n "$BASEDIR" ] && BASEDIR=`cygpath --path --windows "$BASEDIR"`
+ [ -n "$REPO" ] && REPO=`cygpath --path --windows "$REPO"`
+fi
+
+exec "$JAVACMD" $JAVA_OPTS \
+ -classpath "$CLASSPATH" \
+ -Dapp.name="hivesterixcc" \
+ -Dapp.pid="$$" \
+ -Dapp.repo="$REPO" \
+ -Dapp.home="$BASEDIR" \
+ -Dbasedir="$BASEDIR" \
+ edu.uci.ics.hyracks.control.cc.CCDriver \
+ "$@"
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/hivesterixnc b/hivesterix/hivesterix-dist/src/main/resources/scripts/hivesterixnc
new file mode 100755
index 0000000..40f3ec6
--- /dev/null
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/hivesterixnc
@@ -0,0 +1,117 @@
+#!/bin/sh
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
+# resolve links - $0 may be a softlink
+PRG="$0"
+
+while [ -h "$PRG" ]; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`/"$link"
+ fi
+done
+
+PRGDIR=`dirname "$PRG"`
+BASEDIR=`cd "$PRGDIR/.." >/dev/null; pwd`
+
+
+
+# OS specific support. $var _must_ be set to either true or false.
+cygwin=false;
+darwin=false;
+case "`uname`" in
+ CYGWIN*) cygwin=true ;;
+ Darwin*) darwin=true
+ if [ -z "$JAVA_VERSION" ] ; then
+ JAVA_VERSION="CurrentJDK"
+ else
+ echo "Using Java version: $JAVA_VERSION"
+ fi
+ if [ -z "$JAVA_HOME" ] ; then
+ JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home
+ fi
+ ;;
+esac
+
+if [ -z "$JAVA_HOME" ] ; then
+ if [ -r /etc/gentoo-release ] ; then
+ JAVA_HOME=`java-config --jre-home`
+ fi
+fi
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched
+if $cygwin ; then
+ [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+ [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
+fi
+
+# If a specific java binary isn't specified search for the standard 'java' binary
+if [ -z "$JAVACMD" ] ; then
+ if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ else
+ JAVACMD=`which java`
+ fi
+fi
+
+if [ ! -x "$JAVACMD" ] ; then
+ echo "Error: JAVA_HOME is not defined correctly." 1>&2
+ echo " We cannot execute $JAVACMD" 1>&2
+ exit 1
+fi
+
+if [ -z "$REPO" ]
+then
+ REPO="$BASEDIR"/lib
+fi
+
+CLASSPATH=$CLASSPATH_PREFIX:"$HADOOP_HOME"/conf:/etc/hadoop/conf:"$BASEDIR"/etc:$1
+
+#add the desired hive patch
+CLASSPATH=${CLASSPATH}:${REPO}/a-hive-patch.jar
+
+for f in ${REPO}/*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin; then
+ [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
+ [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
+ [ -n "$HOME" ] && HOME=`cygpath --path --windows "$HOME"`2
+ [ -n "$BASEDIR" ] && BASEDIR=`cygpath --path --windows "$BASEDIR"`
+ [ -n "$REPO" ] && REPO=`cygpath --path --windows "$REPO"`
+fi
+
+exec "$JAVACMD" $JAVA_OPTS \
+ -classpath "$CLASSPATH" \
+ -Dapp.name="hivesterixnc" \
+ -Dapp.pid="$$" \
+ -Dapp.repo="$REPO" \
+ -Dapp.home="$BASEDIR" \
+ -Dbasedir="$BASEDIR" \
+ edu.uci.ics.hyracks.control.nc.NCDriver \
+ "$@"
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/startAllNCs.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/startAllNCs.sh
index d30da26..34a318d 100644
--- a/hivesterix/hivesterix-dist/src/main/resources/scripts/startAllNCs.sh
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/startAllNCs.sh
@@ -1,6 +1,24 @@
-PREGELIX_PATH=`pwd`
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
+HIVESTERIX_PATH=`pwd`
for i in `cat conf/slaves`
do
- ssh $i "cd ${PREGELIX_PATH}; export JAVA_HOME=${JAVA_HOME}; bin/startnc.sh"
+ ssh $i "cd ${HIVESTERIX_PATH}; export JAVA_HOME=${JAVA_HOME}; bin/startnc.sh"
done
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/startCluster.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/startCluster.sh
index 6aa9161..cddf9a4 100644
--- a/hivesterix/hivesterix-dist/src/main/resources/scripts/startCluster.sh
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/startCluster.sh
@@ -1,19 +1,21 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
bin/startcc.sh
sleep 5
bin/startAllNCs.sh
-
-. conf/cluster.properties
-# do we need to specify the version somewhere?
-hyrackcmd=`ls ${HYRACKS_HOME}/hyracks-cli/target/hyracks-cli-*-binary-assembly/bin/hyrackscli`
-# find zip file
-appzip=`ls $PWD/../hivesterix-dist-*-binary-assembly.zip`
-
-[ -f $hyrackcmd ] || { echo "Hyracks commandline is missing"; exit -1;}
-[ -f $appzip ] || { echo "Genomix binary-assembly.zip is missing"; exit -1;}
-
-CCHOST_NAME=`cat conf/master`
-
-IPADDR=`bin/getip.sh`
-echo "connect to \"${IPADDR}:${CC_CLIENTPORT}\"; create application hivesterix \"$appzip\";" | $hyrackcmd
-echo ""
-
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/startDebugNc.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/startDebugNc.sh
index fe6cf27..cab8bb3 100755
--- a/hivesterix/hivesterix-dist/src/main/resources/scripts/startDebugNc.sh
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/startDebugNc.sh
@@ -1,3 +1,21 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
hostname
#Get the IP address of the cc
@@ -40,11 +58,11 @@
#Set JAVA_OPTS
export JAVA_OPTS=$NCJAVA_OPTS2
-cd $HYRACKS_HOME
-HYRACKS_HOME=`pwd`
+HIVESTERIX_HOME=`pwd`
#Enter the temp dir
cd $NCTMP_DIR2
#Launch hyracks nc
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS2}" &> $NCLOGS_DIR2/$NODEID.log &
+#echo ${HIVESTERIX_HOME}/bin/hivesterixnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -result-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}"
+${HIVESTERIX_HOME}/bin/hivesterixnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -result-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS2}" &> $NCLOGS_DIR2/$NODEID.log &
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/startcc.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/startcc.sh
index 484ecac..213b9eb 100644
--- a/hivesterix/hivesterix-dist/src/main/resources/scripts/startcc.sh
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/startcc.sh
@@ -1,4 +1,21 @@
#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
hostname
#Import cluster properties
@@ -20,12 +37,15 @@
export JAVA_HOME=$JAVA_HOME
export JAVA_OPTS=$CCJAVA_OPTS
-#Launch hyracks cc script
-chmod -R 755 $HYRACKS_HOME
+HIVESTERIX_HOME=`pwd`
+
+#Enter the temp dir
+cd $CCTMP_DIR
+
if [ -f "conf/topology.xml" ]; then
#Launch hyracks cc script with topology
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 -cluster-topology "conf/topology.xml" &> $CCLOGS_DIR/cc.log &
+${HIVESTERIX_HOME}/bin/hivesterixcc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 -cluster-topology "conf/topology.xml" &> $CCLOGS_DIR/cc.log &
else
#Launch hyracks cc script without topology
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 &> $CCLOGS_DIR/cc.log &
+${HIVESTERIX_HOME}/bin/hivesterixcc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 &> $CCLOGS_DIR/cc.log &
fi
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/startnc.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/startnc.sh
index 23a4c36..904858d 100644
--- a/hivesterix/hivesterix-dist/src/main/resources/scripts/startnc.sh
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/startnc.sh
@@ -1,3 +1,21 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
hostname
MY_NAME=`hostname`
@@ -39,11 +57,11 @@
#Set JAVA_OPTS
export JAVA_OPTS=$NCJAVA_OPTS
-cd $HYRACKS_HOME
-HYRACKS_HOME=`pwd`
+HIVESTERIX_HOME=`pwd`
#Enter the temp dir
cd $NCTMP_DIR
#Launch hyracks nc
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -result-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}" &> $NCLOGS_DIR/$NODEID.log &
+#echo ${HIVESTERIX_HOME}/bin/hivesterixnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -result-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}"
+${HIVESTERIX_HOME}/bin/hivesterixnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -result-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}" &> $NCLOGS_DIR/$NODEID.log &
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/stopAllNCs.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopAllNCs.sh
index 12367c1..456ff68 100644
--- a/hivesterix/hivesterix-dist/src/main/resources/scripts/stopAllNCs.sh
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopAllNCs.sh
@@ -1,6 +1,24 @@
-PREGELIX_PATH=`pwd`
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
+HIVESTERIX_PATH=`pwd`
for i in `cat conf/slaves`
do
- ssh $i "cd ${PREGELIX_PATH}; bin/stopnc.sh"
+ ssh $i "cd ${HIVESTERIX_PATH}; bin/stopnc.sh"
done
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/stopCluster.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopCluster.sh
index 4889934..6b829aa 100644
--- a/hivesterix/hivesterix-dist/src/main/resources/scripts/stopCluster.sh
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopCluster.sh
@@ -1,3 +1,21 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
bin/stopAllNCs.sh
sleep 2
bin/stopcc.sh
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/stopcc.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopcc.sh
index c2f525a..d7f78a8 100644
--- a/hivesterix/hivesterix-dist/src/main/resources/scripts/stopcc.sh
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopcc.sh
@@ -1,8 +1,36 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
hostname
. conf/cluster.properties
#Kill process
-PID=`ps -ef|grep ${USER}|grep java|grep hyracks|awk '{print $2}'`
+PID=`ps -ef|grep ${USER}|grep java|grep 'Dapp.name=hivesterixcc'|awk '{print $2}'`
+
+if [ "$PID" == "" ]; then
+ PID=`ps -ef|grep ${USER}|grep java|grep 'hyracks'|awk '{print $2}'`
+fi
+
+if [ "$PID" == "" ]; then
+ USERID=`id | sed 's/^uid=//;s/(.*$//'`
+ PID=`ps -ef|grep ${USERID}|grep java|grep 'Dapp.name=hivesterixcc'|awk '{print $2}'`
+fi
+
echo $PID
kill -9 $PID
diff --git a/hivesterix/hivesterix-dist/src/main/resources/scripts/stopnc.sh b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopnc.sh
index 35c4794..5dc4ccd 100644
--- a/hivesterix/hivesterix-dist/src/main/resources/scripts/stopnc.sh
+++ b/hivesterix/hivesterix-dist/src/main/resources/scripts/stopnc.sh
@@ -1,8 +1,26 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
hostname
. conf/cluster.properties
#Kill process
-PID=`ps -ef|grep ${USER}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
+PID=`ps -ef|grep ${USER}|grep java|grep 'Dapp.name=hivesterixnc'|awk '{print $2}'`
if [ "$PID" == "" ]; then
PID=`ps -ef|grep ${USER}|grep java|grep 'hyracks'|awk '{print $2}'`
@@ -10,7 +28,7 @@
if [ "$PID" == "" ]; then
USERID=`id | sed 's/^uid=//;s/(.*$//'`
- PID=`ps -ef|grep ${USERID}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
+ PID=`ps -ef|grep ${USERID}|grep java|grep 'Dapp.name=hivesterixnc'|awk '{print $2}'`
fi
echo $PID
diff --git a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java
index 8ac4e86..13574ea 100644
--- a/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java
+++ b/hivesterix/hivesterix-dist/src/test/java/edu/uci/ics/hivesterix/test/optimizer/OptimizerTestSuite.java
@@ -36,7 +36,7 @@
if (isIgnored(qFile.getName(), ignores))
continue;
- if (qFile.isFile() && qFile.getName().startsWith("h11_")) {
+ if (qFile.isFile()) {
String resultFileName = hiveExtToResExt(qFile.getName());
File rFile = new File(PATH_TO_RESULTS + resultFileName);
testSuite.addTest(new OptimizerTestCase(qFile, rFile));
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hive/conf/hive-default.xml b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hive/conf/hive-default.xml
index eef4071..e24aaa7 100644
--- a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hive/conf/hive-default.xml
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/hive/conf/hive-default.xml
@@ -55,7 +55,7 @@
<property>
<name>hive.algebricks.groupby.external</name>
- <value>false</value>
+ <value>true</value>
</property>
<property>
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/ignore.txt b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/ignore.txt
index e70ea78..d4e892a 100644
--- a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/ignore.txt
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/ignore.txt
@@ -1 +1 @@
-q16
\ No newline at end of file
+q16_
\ No newline at end of file
diff --git a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/logging.properties b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/logging.properties
index 1cc34e1..f42e321 100644
--- a/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/logging.properties
+++ b/hivesterix/hivesterix-dist/src/test/resources/runtimefunctionts/logging.properties
@@ -60,6 +60,5 @@
# For example, set the com.xyz.foo logger to only log SEVERE
# messages:
-edu.uci.ics.asterix.level = WARNING
-edu.uci.ics.algebricks.level = WARNING
+#edu.uci.ics.hyracks.algebricks.level = FINEST
edu.uci.ics.hyracks.level = WARNING
diff --git a/hivesterix/hivesterix-optimizer/pom.xml b/hivesterix/hivesterix-optimizer/pom.xml
index 77a5d53..55d6adf 100644
--- a/hivesterix/hivesterix-optimizer/pom.xml
+++ b/hivesterix/hivesterix-optimizer/pom.xml
@@ -4,7 +4,7 @@
<parent>
<artifactId>hivesterix</artifactId>
<groupId>edu.uci.ics.hyracks</groupId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<artifactId>hivesterix-optimizer</artifactId>
@@ -30,14 +30,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix-translator</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java b/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java
index 7e4e271..a049f15 100644
--- a/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java
+++ b/hivesterix/hivesterix-optimizer/src/main/java/edu/uci/ics/hivesterix/optimizer/rulecollections/HiveRuleCollections.java
@@ -40,9 +40,7 @@
public final static LinkedList<IAlgebraicRewriteRule> NORMALIZATION = new LinkedList<IAlgebraicRewriteRule>();
static {
NORMALIZATION.add(new EliminateSubplanRule());
- NORMALIZATION.add(new IntroduceAggregateCombinerRule());
NORMALIZATION.add(new BreakSelectIntoConjunctsRule());
- NORMALIZATION.add(new IntroduceAggregateCombinerRule());
NORMALIZATION.add(new PushSelectIntoJoinRule());
NORMALIZATION.add(new ExtractGbyExpressionsRule());
NORMALIZATION.add(new RemoveRedundantSelectRule());
@@ -84,6 +82,7 @@
CONSOLIDATION.add(new IntroduceEarlyProjectRule());
CONSOLIDATION.add(new ConsolidateAssignsRule());
CONSOLIDATION.add(new IntroduceGroupByCombinerRule());
+ CONSOLIDATION.add(new IntroduceAggregateCombinerRule());
CONSOLIDATION.add(new RemoveUnusedAssignAndAggregateRule());
}
diff --git a/hivesterix/hivesterix-runtime/pom.xml b/hivesterix/hivesterix-runtime/pom.xml
index fc55273..18b8f62 100644
--- a/hivesterix/hivesterix-runtime/pom.xml
+++ b/hivesterix/hivesterix-runtime/pom.xml
@@ -2,13 +2,13 @@
<modelVersion>4.0.0</modelVersion>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix-runtime</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<name>hivesterix-runtime</name>
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<dependencies>
@@ -276,35 +276,35 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-compiler</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix-serde</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/MurmurHash3BinaryHashFunctionFamily.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/MurmurHash3BinaryHashFunctionFamily.java
deleted file mode 100644
index 760a614..0000000
--- a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/factory/hashfunction/MurmurHash3BinaryHashFunctionFamily.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package edu.uci.ics.hivesterix.runtime.factory.hashfunction;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
-
-public class MurmurHash3BinaryHashFunctionFamily implements IBinaryHashFunctionFamily {
-
- public static final IBinaryHashFunctionFamily INSTANCE = new MurmurHash3BinaryHashFunctionFamily();
-
- private static final long serialVersionUID = 1L;
-
- private MurmurHash3BinaryHashFunctionFamily() {
- }
-
- private static final int C1 = 0xcc9e2d51;
- private static final int C2 = 0x1b873593;
- private static final int C3 = 5;
- private static final int C4 = 0xe6546b64;
- private static final int C5 = 0x85ebca6b;
- private static final int C6 = 0xc2b2ae35;
-
- @Override
- public IBinaryHashFunction createBinaryHashFunction(final int seed) {
- return new IBinaryHashFunction() {
- @Override
- public int hash(byte[] bytes, int offset, int length) {
- int h = seed;
- int p = offset;
- int remain = length;
- while (remain >= 4) {
- int k = (bytes[p] & 0xff) | ((bytes[p + 1] & 0xff) << 8) | ((bytes[p + 2] & 0xff) << 16)
- | ((bytes[p + 3] & 0xff) << 24);
- k *= C1;
- k = Integer.rotateLeft(k, 15);
- k *= C2;
- h ^= k;
- h = Integer.rotateLeft(h, 13);
- h = h * C3 + C4;
- p += 4;
- remain -= 4;
- }
- if (remain > 0) {
- int k = 0;
- for (int i = 0; remain > 0; i += 8) {
- k ^= (bytes[p++] & 0xff) << i;
- remain--;
- }
- k *= C1;
- k = Integer.rotateLeft(k, 15);
- k *= C2;
- h ^= k;
- }
- h ^= length;
- h ^= (h >>> 16);
- h *= C5;
- h ^= (h >>> 13);
- h *= C6;
- h ^= (h >>> 16);
- return h;
- }
- };
- }
-}
\ No newline at end of file
diff --git a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFamilyProvider.java b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFamilyProvider.java
index e7a2e79..45302e8 100644
--- a/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFamilyProvider.java
+++ b/hivesterix/hivesterix-runtime/src/main/java/edu/uci/ics/hivesterix/runtime/provider/HiveBinaryHashFunctionFamilyProvider.java
@@ -1,9 +1,9 @@
package edu.uci.ics.hivesterix.runtime.provider;
-import edu.uci.ics.hivesterix.runtime.factory.hashfunction.MurmurHash3BinaryHashFunctionFamily;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.data.IBinaryHashFunctionFamilyProvider;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
+import edu.uci.ics.hyracks.data.std.accessors.MurmurHash3BinaryHashFunctionFamily;
public class HiveBinaryHashFunctionFamilyProvider implements IBinaryHashFunctionFamilyProvider {
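
Note: the import swap above replaces the deleted Hivesterix-local MurmurHash3 family with the copy that now lives in hyracks-data-std. As a minimal sketch of the seeded-family contract, assuming the hyracks-data-std class keeps the same INSTANCE singleton and createBinaryHashFunction(seed) shape as the deleted code above (class and variable names in the sketch are illustrative):

    import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
    import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
    import edu.uci.ics.hyracks.data.std.accessors.MurmurHash3BinaryHashFunctionFamily;

    public class SeededHashSketch {
        public static void main(String[] args) {
            IBinaryHashFunctionFamily family = MurmurHash3BinaryHashFunctionFamily.INSTANCE;
            // a family hands out an independent hash function per seed,
            // which lets each repartitioning round hash differently
            IBinaryHashFunction h0 = family.createBinaryHashFunction(0);
            IBinaryHashFunction h1 = family.createBinaryHashFunction(42);
            byte[] key = { 1, 2, 3, 4, 5 };
            System.out.println(h0.hash(key, 0, key.length));
            System.out.println(h1.hash(key, 0, key.length));
        }
    }
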
diff --git a/hivesterix/hivesterix-serde/pom.xml b/hivesterix/hivesterix-serde/pom.xml
index 72c9cc9..bf5a9b8 100644
--- a/hivesterix/hivesterix-serde/pom.xml
+++ b/hivesterix/hivesterix-serde/pom.xml
@@ -4,7 +4,7 @@
<parent>
<artifactId>hivesterix</artifactId>
<groupId>edu.uci.ics.hyracks</groupId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<artifactId>hivesterix-serde</artifactId>
@@ -44,14 +44,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-compiler</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hivesterix/hivesterix-translator/pom.xml b/hivesterix/hivesterix-translator/pom.xml
index e7ad3ed..7bcbb85 100644
--- a/hivesterix/hivesterix-translator/pom.xml
+++ b/hivesterix/hivesterix-translator/pom.xml
@@ -7,7 +7,7 @@
<parent>
<artifactId>hivesterix</artifactId>
<groupId>edu.uci.ics.hyracks</groupId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -37,21 +37,21 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>algebricks-compiler</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix-runtime</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hivesterix/pom.xml b/hivesterix/pom.xml
index 6ae37b5..3a212ec 100644
--- a/hivesterix/pom.xml
+++ b/hivesterix/pom.xml
@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hivesterix</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<packaging>pom</packaging>
<name>hivesterix</name>
diff --git a/hyracks/hyracks-api/pom.xml b/hyracks/hyracks-api/pom.xml
index 45421e1..04a64c1 100644
--- a/hyracks/hyracks-api/pom.xml
+++ b/hyracks/hyracks-api/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -47,7 +47,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-ipc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IApplicationContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IApplicationContext.java
index 1b079d6..6ee719b 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IApplicationContext.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IApplicationContext.java
@@ -16,6 +16,7 @@
import java.io.Serializable;
+import edu.uci.ics.hyracks.api.job.IJobSerializerDeserializerContainer;
import edu.uci.ics.hyracks.api.messages.IMessageBroker;
/**
@@ -35,4 +36,7 @@
public void setMessageBroker(IMessageBroker messageBroker);
public IMessageBroker getMessageBroker();
+
+ public IJobSerializerDeserializerContainer getJobSerializerDeserializerContainer();
+
}
\ No newline at end of file
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/ICCApplicationContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/ICCApplicationContext.java
index c4b7802..733c7d3 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/ICCApplicationContext.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/ICCApplicationContext.java
@@ -27,11 +27,9 @@
public interface ICCApplicationContext extends IApplicationContext {
/**
* Sets the state that must be distributed by the infrastructure to all the
- * NC application contexts. Any state set by calling this method in
- * the {@link ICCApplicationEntryPoint#start(ICCApplicationContext, String[])} call
- * is made available to all the {@link INCApplicationContext} objects at each Node Controller.
- * The state is then available to be inspected by the application at the NC during or
- * after the {@link INCBootstrap#start()} call.
+ * NC application contexts. Any state set by calling this method in the {@link ICCApplicationEntryPoint#start(ICCApplicationContext, String[])} call is made available to all the {@link INCApplicationContext} objects
+ * at each Node Controller. The state is then available to be inspected by
+ * the application at the NC during or after the {@link INCBootstrap#start()} call.
*
* @param state
* The distributed state
@@ -47,6 +45,14 @@
public void addJobLifecycleListener(IJobLifecycleListener jobLifecycleListener);
/**
+     * Adds a listener that listens to Cluster Lifecycle events at the Cluster
+     * Controller.
+     *
+     * @param clusterLifecycleListener
+ */
+ public void addClusterLifecycleListener(IClusterLifecycleListener clusterLifecycleListener);
+
+ /**
* Get the Cluster Controller Context.
*
* @return The Cluster Controller Context.
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IClusterLifecycleListener.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IClusterLifecycleListener.java
new file mode 100644
index 0000000..51db13e
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/application/IClusterLifecycleListener.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.api.application;
+
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * A listener interface for providing notification callbacks on events such as a Node Controller joining or leaving the cluster.
+ */
+public interface IClusterLifecycleListener {
+
+ /**
+ * @param nodeId
+ * A unique identifier of a Node Controller
+     * @param ncConfiguration
+ * A map containing the set of configuration parameters that were used to start the Node Controller
+ */
+ public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration);
+
+ /**
+ * @param deadNodeIds
+ * A set of Node Controller Ids that have left the cluster. The set is not cumulative.
+ */
+ public void notifyNodeFailure(Set<String> deadNodeIds);
+
+}
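
A minimal sketch of an implementation of the new listener (the class name and log output are illustrative, not part of this patch); an instance would be registered through the addClusterLifecycleListener method added to ICCApplicationContext above:

    import java.util.Map;
    import java.util.Set;

    import edu.uci.ics.hyracks.api.application.IClusterLifecycleListener;

    public class LoggingClusterLifecycleListener implements IClusterLifecycleListener {
        @Override
        public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) {
            System.out.println("NC joined: " + nodeId + " (" + ncConfiguration.size() + " config entries)");
        }

        @Override
        public void notifyNodeFailure(Set<String> deadNodeIds) {
            // per the javadoc above, the set is not cumulative: it holds only
            // the nodes lost in this event
            System.out.println("NCs lost: " + deadNodeIds);
        }
    }
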
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceFunctions.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceFunctions.java
index 88df49f..0467eae 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceFunctions.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceFunctions.java
@@ -15,10 +15,13 @@
package edu.uci.ics.hyracks.api.client;
import java.io.Serializable;
+import java.net.URL;
import java.util.EnumSet;
+import java.util.List;
import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.job.JobFlag;
import edu.uci.ics.hyracks.api.job.JobId;
@@ -34,7 +37,9 @@
GET_DATASET_RECORD_DESCRIPTOR,
GET_DATASET_RESULT_LOCATIONS,
WAIT_FOR_COMPLETION,
- GET_NODE_CONTROLLERS_INFO
+ GET_NODE_CONTROLLERS_INFO,
+ CLI_DEPLOY_BINARY,
+ CLI_UNDEPLOY_BINARY
}
public abstract static class Function implements Serializable {
@@ -76,10 +81,18 @@
private final byte[] acggfBytes;
private final EnumSet<JobFlag> jobFlags;
+ private final DeploymentId deploymentId;
public StartJobFunction(byte[] acggfBytes, EnumSet<JobFlag> jobFlags) {
this.acggfBytes = acggfBytes;
this.jobFlags = jobFlags;
+ this.deploymentId = null;
+ }
+
+ public StartJobFunction(DeploymentId deploymentId, byte[] acggfBytes, EnumSet<JobFlag> jobFlags) {
+ this.acggfBytes = acggfBytes;
+ this.jobFlags = jobFlags;
+ this.deploymentId = deploymentId;
}
@Override
@@ -94,6 +107,10 @@
public EnumSet<JobFlag> getJobFlags() {
return jobFlags;
}
+
+ public DeploymentId getDeploymentId() {
+ return deploymentId;
+ }
}
public static class GetDatasetDirectoryServiceInfoFunction extends Function {
@@ -200,4 +217,46 @@
return FunctionId.GET_CLUSTER_TOPOLOGY;
}
}
+
+ public static class CliDeployBinaryFunction extends Function {
+ private static final long serialVersionUID = 1L;
+ private final List<URL> binaryURLs;
+ private final DeploymentId deploymentId;
+
+ public CliDeployBinaryFunction(List<URL> binaryURLs, DeploymentId deploymentId) {
+ this.binaryURLs = binaryURLs;
+ this.deploymentId = deploymentId;
+ }
+
+ @Override
+ public FunctionId getFunctionId() {
+ return FunctionId.CLI_DEPLOY_BINARY;
+ }
+
+ public List<URL> getBinaryURLs() {
+ return binaryURLs;
+ }
+
+ public DeploymentId getDeploymentId() {
+ return deploymentId;
+ }
+ }
+
+ public static class CliUnDeployBinaryFunction extends Function {
+ private static final long serialVersionUID = 1L;
+ private final DeploymentId deploymentId;
+
+ public CliUnDeployBinaryFunction(DeploymentId deploymentId) {
+ this.deploymentId = deploymentId;
+ }
+
+ @Override
+ public FunctionId getFunctionId() {
+ return FunctionId.CLI_UNDEPLOY_BINARY;
+ }
+
+ public DeploymentId getDeploymentId() {
+ return deploymentId;
+ }
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java
index 033fc02..36ceb18 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksClientInterfaceRemoteProxy.java
@@ -14,10 +14,13 @@
*/
package edu.uci.ics.hyracks.api.client;
+import java.net.URL;
import java.util.EnumSet;
+import java.util.List;
import java.util.Map;
import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.job.JobFlag;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobStatus;
@@ -56,6 +59,13 @@
}
@Override
+ public JobId startJob(DeploymentId deploymentId, byte[] acggfBytes, EnumSet<JobFlag> jobFlags) throws Exception {
+ HyracksClientInterfaceFunctions.StartJobFunction sjf = new HyracksClientInterfaceFunctions.StartJobFunction(
+ deploymentId, acggfBytes, jobFlags);
+ return (JobId) rpci.call(ipcHandle, sjf);
+ }
+
+ @Override
public NetworkAddress getDatasetDirectoryServiceInfo() throws Exception {
HyracksClientInterfaceFunctions.GetDatasetDirectoryServiceInfoFunction gddsf = new HyracksClientInterfaceFunctions.GetDatasetDirectoryServiceInfoFunction();
return (NetworkAddress) rpci.call(ipcHandle, gddsf);
@@ -79,4 +89,18 @@
HyracksClientInterfaceFunctions.GetClusterTopologyFunction gctf = new HyracksClientInterfaceFunctions.GetClusterTopologyFunction();
return (ClusterTopology) rpci.call(ipcHandle, gctf);
}
+
+ @Override
+ public void deployBinary(List<URL> binaryURLs, DeploymentId deploymentId) throws Exception {
+ HyracksClientInterfaceFunctions.CliDeployBinaryFunction dbf = new HyracksClientInterfaceFunctions.CliDeployBinaryFunction(
+ binaryURLs, deploymentId);
+ rpci.call(ipcHandle, dbf);
+ }
+
+ @Override
+ public void unDeployBinary(DeploymentId deploymentId) throws Exception {
+ HyracksClientInterfaceFunctions.CliUnDeployBinaryFunction dbf = new HyracksClientInterfaceFunctions.CliUnDeployBinaryFunction(
+ deploymentId);
+ rpci.call(ipcHandle, dbf);
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksConnection.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksConnection.java
index 8274416..6af8dd9 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksConnection.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/HyracksConnection.java
@@ -14,12 +14,25 @@
*/
package edu.uci.ics.hyracks.api.client;
+import java.io.File;
import java.net.InetSocketAddress;
+import java.net.URL;
+import java.util.ArrayList;
import java.util.EnumSet;
+import java.util.List;
import java.util.Map;
+import java.util.UUID;
+
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpPut;
+import org.apache.http.entity.FileEntity;
+import org.apache.http.impl.client.DefaultHttpClient;
import edu.uci.ics.hyracks.api.client.impl.JobSpecificationActivityClusterGraphGeneratorFactory;
import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
import edu.uci.ics.hyracks.api.job.JobFlag;
import edu.uci.ics.hyracks.api.job.JobId;
@@ -107,4 +120,59 @@
public ClusterTopology getClusterTopology() throws Exception {
return hci.getClusterTopology();
}
+
+ @Override
+ public DeploymentId deployBinary(List<String> jars) throws Exception {
+ /** generate a deployment id */
+ DeploymentId deploymentId = new DeploymentId(UUID.randomUUID().toString());
+ List<URL> binaryURLs = new ArrayList<URL>();
+ if (jars != null && jars.size() > 0) {
+ HttpClient hc = new DefaultHttpClient();
+ /** upload jars through a http client one-by-one to the CC server */
+ for (String jar : jars) {
+ int slashIndex = jar.lastIndexOf('/');
+ String fileName = jar.substring(slashIndex + 1);
+ String url = "http://" + ccHost + ":" + ccInfo.getWebPort() + "/applications/"
+ + deploymentId.toString() + "&" + fileName;
+ HttpPut put = new HttpPut(url);
+ put.setEntity(new FileEntity(new File(jar), "application/octet-stream"));
+ HttpResponse response = hc.execute(put);
+                /** execute() throws on failure rather than returning null, so consume the content directly */
+                response.getEntity().consumeContent();
+                if (response.getStatusLine().getStatusCode() != 200) {
+ hci.unDeployBinary(deploymentId);
+ throw new HyracksException(response.getStatusLine().toString());
+ }
+ /** add the uploaded URL address into the URLs of jars to be deployed at NCs */
+ binaryURLs.add(new URL(url));
+ }
+ }
+ /**deploy the URLs to the CC and NCs*/
+ hci.deployBinary(binaryURLs, deploymentId);
+ return deploymentId;
+ }
+
+ @Override
+ public void unDeployBinary(DeploymentId deploymentId) throws Exception {
+ hci.unDeployBinary(deploymentId);
+ }
+
+ @Override
+ public JobId startJob(DeploymentId deploymentId, JobSpecification jobSpec) throws Exception {
+ return startJob(deploymentId, jobSpec, EnumSet.noneOf(JobFlag.class));
+ }
+
+ @Override
+ public JobId startJob(DeploymentId deploymentId, JobSpecification jobSpec, EnumSet<JobFlag> jobFlags)
+ throws Exception {
+ JobSpecificationActivityClusterGraphGeneratorFactory jsacggf = new JobSpecificationActivityClusterGraphGeneratorFactory(
+ jobSpec);
+ return startJob(deploymentId, jsacggf, jobFlags);
+ }
+
+ @Override
+ public JobId startJob(DeploymentId deploymentId, IActivityClusterGraphGeneratorFactory acggf,
+ EnumSet<JobFlag> jobFlags) throws Exception {
+ return hci.startJob(deploymentId, JavaSerializationUtils.serialize(acggf), jobFlags);
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java
index fe4094f..41b07d7 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientConnection.java
@@ -15,9 +15,11 @@
package edu.uci.ics.hyracks.api.client;
import java.util.EnumSet;
+import java.util.List;
import java.util.Map;
import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
import edu.uci.ics.hyracks.api.job.JobFlag;
import edu.uci.ics.hyracks.api.job.JobId;
@@ -110,4 +112,61 @@
* @throws Exception
*/
public ClusterTopology getClusterTopology() throws Exception;
+
+ /**
+ * Deploy the user-defined jars to the cluster
+ *
+ * @param jars
+     *            a list of user-defined jars
+     * @return the id of the resulting deployment
+     */
+ public DeploymentId deployBinary(List<String> jars) throws Exception;
+
+ /**
+     * Undeploy a certain deployment
+     *
+     * @param deploymentId
+     *            the id of the deployment to remove
+ */
+ public void unDeployBinary(DeploymentId deploymentId) throws Exception;
+
+ /**
+ * Start the specified Job.
+ *
+ * @param deploymentId
+ * the id of the specific deployment
+ * @param jobSpec
+ * Job Specification
+ * @throws Exception
+ */
+ public JobId startJob(DeploymentId deploymentId, JobSpecification jobSpec) throws Exception;
+
+ /**
+ * Start the specified Job.
+ *
+ * @param deploymentId
+ * the id of the specific deployment
+ * @param jobSpec
+ * Job Specification
+ * @param jobFlags
+ * Flags
+ * @throws Exception
+ */
+ public JobId startJob(DeploymentId deploymentId, JobSpecification jobSpec, EnumSet<JobFlag> jobFlags)
+ throws Exception;
+
+ /**
+ * Start the specified Job.
+ *
+ * @param deploymentId
+ * the id of the specific deployment
+ * @param acggf
+ * Activity Cluster Graph Generator Factory
+ * @param jobFlags
+ * Flags
+ * @throws Exception
+ */
+ public JobId startJob(DeploymentId deploymentId, IActivityClusterGraphGeneratorFactory acggf,
+ EnumSet<JobFlag> jobFlags) throws Exception;
+
}
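
Taken together, the new methods give the client a deploy/run/undeploy cycle. A minimal sketch under stated assumptions: the host, port, and jar path are placeholders, and buildJob() is a hypothetical helper standing in for real job construction:

    import java.util.Arrays;

    import edu.uci.ics.hyracks.api.client.HyracksConnection;
    import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
    import edu.uci.ics.hyracks.api.deployment.DeploymentId;
    import edu.uci.ics.hyracks.api.job.JobSpecification;

    public class DeployAndRunSketch {
        public static void main(String[] args) throws Exception {
            IHyracksClientConnection hcc = new HyracksConnection("cc-host", 1098);
            // upload the user jars to the CC, which pushes them to the NCs
            DeploymentId did = hcc.deployBinary(Arrays.asList("/tmp/my-udfs.jar"));
            try {
                // run the job against the classes of that deployment
                hcc.waitForCompletion(hcc.startJob(did, buildJob()));
            } finally {
                // drop the deployment so its jars and classloader can be released
                hcc.unDeployBinary(did);
            }
        }

        private static JobSpecification buildJob() {
            return new JobSpecification(); // placeholder: a real spec would add operators
        }
    }
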
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java
index 6fdf638..aabc351 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/client/IHyracksClientInterface.java
@@ -14,10 +14,13 @@
*/
package edu.uci.ics.hyracks.api.client;
+import java.net.URL;
import java.util.EnumSet;
+import java.util.List;
import java.util.Map;
import edu.uci.ics.hyracks.api.comm.NetworkAddress;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.job.JobFlag;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobStatus;
@@ -37,4 +40,10 @@
public Map<String, NodeControllerInfo> getNodeControllersInfo() throws Exception;
public ClusterTopology getClusterTopology() throws Exception;
+
+ public void deployBinary(List<URL> binaryURLs, DeploymentId deploymentId) throws Exception;
+
+ public void unDeployBinary(DeploymentId deploymentId) throws Exception;
+
+ public JobId startJob(DeploymentId deploymentId, byte[] acggfBytes, EnumSet<JobFlag> jobFlags) throws Exception;
}
\ No newline at end of file
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksJobletContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksJobletContext.java
index fad4300..f8c5b6a 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksJobletContext.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksJobletContext.java
@@ -28,4 +28,8 @@
public ICounterContext getCounterContext();
public Object getGlobalJobData();
+
+ public Class<?> loadClass(String className);
+
+ public ClassLoader getClassLoader();
}
\ No newline at end of file
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksTaskContext.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksTaskContext.java
index a2ee977..f0b47b1 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksTaskContext.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/context/IHyracksTaskContext.java
@@ -16,6 +16,7 @@
import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.io.IWorkspaceFileFactory;
import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
import edu.uci.ics.hyracks.api.job.profiling.counters.ICounterContext;
@@ -31,5 +32,5 @@
public IDatasetPartitionManager getDatasetPartitionManager();
- public void sendApplicationMessageToCC(byte[] message, String nodeId) throws Exception;
+    public void sendApplicationMessageToCC(byte[] message, DeploymentId deploymentId, String nodeId) throws Exception;
}
\ No newline at end of file
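
The extra DeploymentId argument lets the receiving side pick the classloader that matches the sender's deployment when deserializing the message. A minimal sketch of the new call path (the helper class is hypothetical; JavaSerializationUtils.serialize is the utility HyracksConnection already uses in this patch):

    import java.io.Serializable;

    import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
    import edu.uci.ics.hyracks.api.deployment.DeploymentId;
    import edu.uci.ics.hyracks.api.util.JavaSerializationUtils;

    public class MessageSenderSketch {
        public static void send(IHyracksTaskContext ctx, DeploymentId deploymentId, String nodeId,
                Serializable message) throws Exception {
            // the deployment id travels with the payload so the CC can resolve
            // message classes against the right deployment's jars
            ctx.sendApplicationMessageToCC(JavaSerializationUtils.serialize(message), deploymentId, nodeId);
        }
    }
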
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/DatasetJobRecord.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/DatasetJobRecord.java
index c3bf77a..563ee1b 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/DatasetJobRecord.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/DatasetJobRecord.java
@@ -28,7 +28,7 @@
private Status status;
- private List<Throwable> caughtExceptions;
+ private List<Exception> exceptions;
public DatasetJobRecord() {
this.status = Status.RUNNING;
@@ -46,16 +46,16 @@
status = Status.FAILED;
}
- public void fail(List<Throwable> caughtExceptions) {
+ public void fail(List<Exception> exceptions) {
status = Status.FAILED;
- this.caughtExceptions = caughtExceptions;
+ this.exceptions = exceptions;
}
public Status getStatus() {
return status;
}
- public List<Throwable> getCaughtExceptions() {
- return caughtExceptions;
+ public List<Exception> getExceptions() {
+ return exceptions;
}
}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetDirectoryService.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetDirectoryService.java
index ccb18b3..d152cf5 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetDirectoryService.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetDirectoryService.java
@@ -30,7 +30,7 @@
public void reportResultPartitionFailure(JobId jobId, ResultSetId rsId, int partition);
- public void reportJobFailure(JobId jobId, List<Throwable> caughtExceptions);
+ public void reportJobFailure(JobId jobId, List<Exception> exceptions);
public Status getResultStatus(JobId jobId, ResultSetId rsId) throws HyracksDataException;
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionManager.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionManager.java
index ae38c7f..dea077d 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionManager.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionManager.java
@@ -29,7 +29,10 @@
public void reportPartitionFailure(JobId jobId, ResultSetId resultSetId, int partition) throws HyracksException;
- public void initializeDatasetPartitionReader(JobId jobId, int partition, IFrameWriter noc) throws HyracksException;
+ public void initializeDatasetPartitionReader(JobId jobId, ResultSetId resultSetId, int partition, IFrameWriter noc)
+ throws HyracksException;
+
+ public void abortReader(JobId jobId);
public IWorkspaceFileFactory getFileFactory();
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionWriter.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionWriter.java
deleted file mode 100644
index 42dc157..0000000
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionWriter.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.api.dataset;
-
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public interface IDatasetPartitionWriter extends IFrameWriter {
- public Page returnPage() throws HyracksDataException;
-}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceConnection.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceConnection.java
index d49d5cd..f29356b 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceConnection.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceConnection.java
@@ -14,7 +14,7 @@
*/
package edu.uci.ics.hyracks.api.dataset;
-import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.DatasetJobRecord.Status;
import edu.uci.ics.hyracks.api.job.JobId;
public interface IHyracksDatasetDirectoryServiceConnection {
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceInterface.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceInterface.java
index ba21a84..3fe4ada 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceInterface.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetDirectoryServiceInterface.java
@@ -14,7 +14,7 @@
*/
package edu.uci.ics.hyracks.api.dataset;
-import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.DatasetJobRecord.Status;
import edu.uci.ics.hyracks.api.job.JobId;
public interface IHyracksDatasetDirectoryServiceInterface {
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetReader.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetReader.java
index b928a49..c397a94 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetReader.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IHyracksDatasetReader.java
@@ -16,7 +16,7 @@
import java.nio.ByteBuffer;
-import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.DatasetJobRecord.Status;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
public interface IHyracksDatasetReader {
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/deployment/DeploymentId.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/deployment/DeploymentId.java
new file mode 100644
index 0000000..8d8d728
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/deployment/DeploymentId.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.api.deployment;
+
+import java.io.Serializable;
+
+/**
+ * The representation of a deployment id
+ *
+ * @author yingyib
+ *
+ */
+public class DeploymentId implements Serializable {
+ private static final long serialVersionUID = 1L;
+
+ private final String deploymentKey;
+
+ public DeploymentId(String deploymentKey) {
+ this.deploymentKey = deploymentKey;
+ }
+
+    @Override
+    public int hashCode() {
+ return deploymentKey.hashCode();
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof DeploymentId)) {
+ return false;
+ }
+ return ((DeploymentId) o).deploymentKey.equals(deploymentKey);
+ }
+
+ @Override
+ public String toString() {
+ return deploymentKey;
+ }
+}
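
Since equals and hashCode delegate to the key string, DeploymentId behaves as a value type and can safely key a map. A short sketch; the UUID-based key mirrors how HyracksConnection.deployBinary generates ids elsewhere in this patch:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.UUID;

    import edu.uci.ics.hyracks.api.deployment.DeploymentId;

    public class DeploymentIdSketch {
        public static void main(String[] args) {
            DeploymentId id1 = new DeploymentId(UUID.randomUUID().toString());
            // a second instance built from the same key string is equal to the first
            DeploymentId id2 = new DeploymentId(id1.toString());
            System.out.println(id1.equals(id2)); // true
            Map<DeploymentId, String> deployments = new HashMap<DeploymentId, String>();
            deployments.put(id1, "my-udf-jars");
            System.out.println(deployments.get(id2)); // my-udf-jars
        }
    }
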
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobSerializerDeserializer.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobSerializerDeserializer.java
new file mode 100644
index 0000000..a105953
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobSerializerDeserializer.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.api.job;
+
+import java.io.Serializable;
+import java.net.URL;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+
+/**
+ * The serializer/deserializer/classloader interface for job/task information such as job specifications, activity graphs and so on.
+ *
+ * @author yingyib
+ */
+public interface IJobSerializerDeserializer {
+
+ /**
+ * Deserialize the bytes to an object
+ *
+ * @param bytes
+ * the binary content of an object
+ * @return the deserialized object
+ * @throws HyracksException
+ */
+ public Object deserialize(byte[] bytes) throws HyracksException;
+
+ /**
+ * Serialize an object into bytes
+ *
+ * @param object
+ * a Serializable Java object
+ * @return
+ * the byte array which contains the binary content of the input object
+ * @throws HyracksException
+ */
+ public byte[] serialize(Serializable object) throws HyracksException;
+
+ /**
+ * Load a class by its name
+ *
+ * @param className
+ * the name of the class
+ * @return the loaded class
+ * @throws HyracksException
+ */
+ public Class<?> loadClass(String className) throws HyracksException;
+
+ /**
+ * Add URLs (e.g., the locations of deployed jar files) to the classpath used for loading classes
+ *
+ * @param binaryURLs
+ * the list of classpath URLs to add
+ * @throws HyracksException
+ */
+ public void addClassPathURLs(List<URL> binaryURLs) throws HyracksException;
+
+ public ClassLoader getClassLoader() throws HyracksException;
+
+}
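A hypothetical sketch of a deployment-aware implementation of this interface, backing addClassPathURLs and loadClass with a URLClassLoader so that deserialization resolves classes from deployed jars. The class name is invented, and the two-argument JavaSerializationUtils.deserialize(byte[], ClassLoader) is assumed from the ClassLoaderObjectInputStream support visible later in this diff; the actual implementation in this change set may differ.

    import java.io.Serializable;
    import java.net.URL;
    import java.net.URLClassLoader;
    import java.util.List;

    import edu.uci.ics.hyracks.api.exceptions.HyracksException;
    import edu.uci.ics.hyracks.api.job.IJobSerializerDeserializer;
    import edu.uci.ics.hyracks.api.util.JavaSerializationUtils;

    public class UrlClassLoaderJobSerDe implements IJobSerializerDeserializer {
        private URLClassLoader classLoader;

        @Override
        public synchronized void addClassPathURLs(List<URL> binaryURLs) throws HyracksException {
            // The deployment jars are parented to this class's own loader.
            classLoader = new URLClassLoader(binaryURLs.toArray(new URL[0]), this.getClass().getClassLoader());
        }

        @Override
        public Object deserialize(byte[] bytes) throws HyracksException {
            try {
                // Assumed overload; resolves classes against the deployment loader.
                return JavaSerializationUtils.deserialize(bytes, classLoader);
            } catch (Exception e) {
                throw new HyracksException(e);
            }
        }

        @Override
        public byte[] serialize(Serializable object) throws HyracksException {
            try {
                // Overload added by this change set.
                return JavaSerializationUtils.serialize(object, classLoader);
            } catch (Exception e) {
                throw new HyracksException(e);
            }
        }

        @Override
        public Class<?> loadClass(String className) throws HyracksException {
            try {
                return classLoader.loadClass(className);
            } catch (Exception e) {
                throw new HyracksException(e);
            }
        }

        @Override
        public ClassLoader getClassLoader() throws HyracksException {
            return classLoader;
        }
    }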
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobSerializerDeserializerContainer.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobSerializerDeserializerContainer.java
new file mode 100644
index 0000000..0ce2346
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/IJobSerializerDeserializerContainer.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.api.job;
+
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
+
+public interface IJobSerializerDeserializerContainer {
+
+ /**
+ * Get the IJobSerializerDeserializer implementation instance for a specific deployment id
+ *
+ * @param deploymentId
+ * @return the registered serializer/deserializer for the deployment id, or the default one when deploymentId is null
+ */
+ public IJobSerializerDeserializer getJobSerializerDeerializer(DeploymentId deploymentId);
+
+ /**
+ * Register the job serializer/deserializer for a deployment
+ *
+ * @param deploymentId
+ * @param jobSerDe
+ */
+ public void addJobSerializerDeserializer(DeploymentId deploymentId, IJobSerializerDeserializer jobSerDe);
+
+ /**
+ * Remove a deployment
+ *
+ * @param deploymentId
+ */
+ public void removeJobSerializerDeserializer(DeploymentId deploymentId);
+
+}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSerializerDeserializer.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSerializerDeserializer.java
new file mode 100644
index 0000000..0ea4309
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSerializerDeserializer.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.api.job;
+
+import java.io.Serializable;
+import java.net.URL;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.util.JavaSerializationUtils;
+
+public class JobSerializerDeserializer implements IJobSerializerDeserializer {
+
+ @Override
+ public Object deserialize(byte[] jsBytes) throws HyracksException {
+ try {
+ return JavaSerializationUtils.deserialize(jsBytes);
+ } catch (Exception e) {
+ throw new HyracksException(e);
+ }
+ }
+
+ @Override
+ public byte[] serialize(Serializable obj) throws HyracksException {
+ try {
+ return JavaSerializationUtils.serialize(obj);
+ } catch (Exception e) {
+ throw new HyracksException(e);
+ }
+ }
+
+ @Override
+ public void addClassPathURLs(List<URL> binaryURLs) {
+ throw new UnsupportedOperationException("Not supported by " + this.getClass().getName());
+ }
+
+ @Override
+ public Class<?> loadClass(String className) throws HyracksException {
+ try {
+ return this.getClass().getClassLoader().loadClass(className);
+ } catch (Exception e) {
+ throw new HyracksException(e);
+ }
+ }
+
+ @Override
+ public ClassLoader getClassLoader() throws HyracksException {
+ return this.getClass().getClassLoader();
+ }
+
+}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSerializerDeserializerContainer.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSerializerDeserializerContainer.java
new file mode 100644
index 0000000..35a1e8b
--- /dev/null
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/job/JobSerializerDeserializerContainer.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.api.job;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
+
+public class JobSerializerDeserializerContainer implements IJobSerializerDeserializerContainer {
+
+ private IJobSerializerDeserializer defaultJobSerDe = new JobSerializerDeserializer();
+ private Map<DeploymentId, IJobSerializerDeserializer> jobSerializerDeserializerMap = new HashMap<DeploymentId, IJobSerializerDeserializer>();
+
+ @Override
+ public synchronized IJobSerializerDeserializer getJobSerializerDeerializer(DeploymentId deploymentId) {
+ if (deploymentId == null) {
+ return defaultJobSerDe;
+ }
+ return jobSerializerDeserializerMap.get(deploymentId);
+ }
+
+ @Override
+ public synchronized void addJobSerializerDeserializer(DeploymentId deploymentId, IJobSerializerDeserializer jobSerDe) {
+ jobSerializerDeserializerMap.put(deploymentId, jobSerDe);
+ }
+
+ @Override
+ public synchronized void removeJobSerializerDeserializer(DeploymentId deploymentId) {
+ jobSerializerDeserializerMap.remove(deploymentId);
+ }
+
+}
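A usage sketch for the container above (the class and variable names are local to this example): a null deployment id resolves to the default serializer/deserializer, a registered id resolves to its own instance until removed.

    import edu.uci.ics.hyracks.api.deployment.DeploymentId;
    import edu.uci.ics.hyracks.api.job.IJobSerializerDeserializer;
    import edu.uci.ics.hyracks.api.job.IJobSerializerDeserializerContainer;
    import edu.uci.ics.hyracks.api.job.JobSerializerDeserializer;
    import edu.uci.ics.hyracks.api.job.JobSerializerDeserializerContainer;

    public class ContainerUsageExample {
        public static void main(String[] args) {
            IJobSerializerDeserializerContainer container = new JobSerializerDeserializerContainer();
            DeploymentId did = new DeploymentId("app-1");

            container.addJobSerializerDeserializer(did, new JobSerializerDeserializer());
            IJobSerializerDeserializer perDeployment = container.getJobSerializerDeerializer(did);
            IJobSerializerDeserializer fallback = container.getJobSerializerDeerializer(null);

            container.removeJobSerializerDeserializer(did);
            // After removal, lookups for "app-1" return null rather than the default.
            System.out.println(perDeployment != fallback);
            System.out.println(container.getJobSerializerDeerializer(did) == null);
        }
    }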
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/util/JavaSerializationUtils.java b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/util/JavaSerializationUtils.java
index 764348f..a3c8aed 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/util/JavaSerializationUtils.java
+++ b/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/util/JavaSerializationUtils.java
@@ -33,6 +33,19 @@
return baos.toByteArray();
}
+ public static byte[] serialize(Serializable jobSpec, ClassLoader classLoader) throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ ObjectOutputStream oos = new ObjectOutputStream(baos);
+ ClassLoader ctxCL = Thread.currentThread().getContextClassLoader();
+ try {
+ Thread.currentThread().setContextClassLoader(classLoader);
+ oos.writeObject(jobSpec);
+ return baos.toByteArray();
+ } finally {
+ Thread.currentThread().setContextClassLoader(ctxCL);
+ }
+ }
+
public static Object deserialize(byte[] bytes) throws IOException, ClassNotFoundException {
if (bytes == null) {
return null;
@@ -55,6 +68,10 @@
}
}
+ public static Class<?> loadClass(String className) throws IOException, ClassNotFoundException {
+ return Class.forName(className);
+ }
+
private static class ClassLoaderObjectInputStream extends ObjectInputStream {
private ClassLoader classLoader;
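A minimal sketch of calling the serialize overload added above: the given loader is installed as the thread's context class loader for the duration of writeObject and restored afterwards, so classes resolved during serialization come from that loader. The class name here is invented for illustration.

    import java.io.IOException;
    import java.io.Serializable;

    import edu.uci.ics.hyracks.api.util.JavaSerializationUtils;

    public class ContextClassLoaderSerializeExample {
        public static void main(String[] args) throws IOException {
            Serializable payload = "hello";
            // Any loader works here; in the deployment path this would be the
            // deployment's class loader.
            ClassLoader cl = ContextClassLoaderSerializeExample.class.getClassLoader();
            byte[] bytes = JavaSerializationUtils.serialize(payload, cl);
            System.out.println(bytes.length + " bytes");
        }
    }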
diff --git a/hyracks/hyracks-client/pom.xml b/hyracks/hyracks-client/pom.xml
index 4febc6e..76a2659 100644
--- a/hyracks/hyracks-client/pom.xml
+++ b/hyracks/hyracks-client/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -25,22 +25,22 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-net</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-comm</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceConnection.java b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceConnection.java
index 095fd7d..c882448 100644
--- a/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceConnection.java
+++ b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceConnection.java
@@ -17,7 +17,7 @@
import java.net.InetSocketAddress;
import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
-import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.DatasetJobRecord.Status;
import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetDirectoryServiceConnection;
import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetDirectoryServiceInterface;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
diff --git a/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java
index 47cdf97..9c3b918 100644
--- a/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java
+++ b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java
@@ -16,7 +16,7 @@
import edu.uci.ics.hyracks.api.client.HyracksClientInterfaceFunctions;
import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
-import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.DatasetJobRecord.Status;
import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetDirectoryServiceInterface;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.job.JobId;
diff --git a/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetReader.java b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetReader.java
index acdbf3d..97d3744 100644
--- a/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetReader.java
+++ b/hyracks/hyracks-client/src/main/java/edu/uci/ics/hyracks/client/dataset/HyracksDatasetReader.java
@@ -26,7 +26,7 @@
import edu.uci.ics.hyracks.api.channels.IInputChannel;
import edu.uci.ics.hyracks.api.comm.NetworkAddress;
import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
-import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord.Status;
+import edu.uci.ics.hyracks.api.dataset.DatasetJobRecord.Status;
import edu.uci.ics.hyracks.api.dataset.IDatasetInputChannelMonitor;
import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetDirectoryServiceConnection;
import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
@@ -101,7 +101,7 @@
knownRecords);
lastReadPartition = 0;
resultChannel = new DatasetNetworkInputChannel(netManager,
- getSocketAddress(knownRecords[lastReadPartition]), jobId, lastReadPartition,
+ getSocketAddress(knownRecords[lastReadPartition]), jobId, resultSetId, lastReadPartition,
NUM_READ_BUFFERS);
lastMonitor = getMonitor(lastReadPartition);
resultChannel.open(datasetClientCtx);
@@ -114,7 +114,7 @@
while (readSize <= 0 && !(isLastPartitionReadComplete())) {
synchronized (lastMonitor) {
- while (lastMonitor.getNFramesAvailable() <= 0 && !lastMonitor.eosReached()) {
+ while (lastMonitor.getNFramesAvailable() <= 0 && !lastMonitor.eosReached() && !lastMonitor.failed()) {
try {
lastMonitor.wait();
} catch (InterruptedException e) {
@@ -123,6 +123,9 @@
}
}
+ if (lastMonitor.failed()) {
+ throw new HyracksDataException("Job failed.");
+ }
if (isPartitionReadComplete(lastMonitor)) {
knownRecords[lastReadPartition].readEOS();
if ((lastReadPartition == knownRecords.length - 1)) {
@@ -131,23 +134,17 @@
try {
lastReadPartition++;
while (knownRecords[lastReadPartition] == null) {
- try {
- knownRecords = datasetDirectoryServiceConnection.getDatasetResultLocations(jobId,
- resultSetId, knownRecords);
- } catch (Exception e) {
- // Do nothing here.
- }
+ knownRecords = datasetDirectoryServiceConnection.getDatasetResultLocations(jobId,
+ resultSetId, knownRecords);
}
resultChannel = new DatasetNetworkInputChannel(netManager,
- getSocketAddress(knownRecords[lastReadPartition]), jobId, lastReadPartition,
- NUM_READ_BUFFERS);
+ getSocketAddress(knownRecords[lastReadPartition]), jobId, resultSetId,
+ lastReadPartition, NUM_READ_BUFFERS);
lastMonitor = getMonitor(lastReadPartition);
resultChannel.open(datasetClientCtx);
resultChannel.registerMonitor(lastMonitor);
- } catch (HyracksException e) {
- throw new HyracksDataException(e);
- } catch (UnknownHostException e) {
+ } catch (Exception e) {
throw new HyracksDataException(e);
}
}
diff --git a/hyracks/hyracks-comm/pom.xml b/hyracks/hyracks-comm/pom.xml
index 647faa6..036e507 100644
--- a/hyracks/hyracks-comm/pom.xml
+++ b/hyracks/hyracks-comm/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -25,12 +25,12 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-net</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/DatasetNetworkInputChannel.java b/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/DatasetNetworkInputChannel.java
index fac2949..1ab315b 100644
--- a/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/DatasetNetworkInputChannel.java
+++ b/hyracks/hyracks-comm/src/main/java/edu/uci/ics/hyracks/comm/channels/DatasetNetworkInputChannel.java
@@ -24,6 +24,7 @@
import edu.uci.ics.hyracks.api.channels.IInputChannel;
import edu.uci.ics.hyracks.api.channels.IInputChannelMonitor;
import edu.uci.ics.hyracks.api.context.IHyracksCommonContext;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.net.buffers.IBufferAcceptor;
@@ -41,6 +42,8 @@
private final JobId jobId;
+ private final ResultSetId resultSetId;
+
private final int partition;
private final Queue<ByteBuffer> fullQueue;
@@ -54,10 +57,11 @@
private Object attachment;
public DatasetNetworkInputChannel(IChannelConnectionFactory netManager, SocketAddress remoteAddress, JobId jobId,
- int partition, int nBuffers) {
+ ResultSetId resultSetId, int partition, int nBuffers) {
this.netManager = netManager;
this.remoteAddress = remoteAddress;
this.jobId = jobId;
+ this.resultSetId = resultSetId;
this.partition = partition;
fullQueue = new ArrayDeque<ByteBuffer>(nBuffers);
this.nBuffers = nBuffers;
@@ -103,6 +107,7 @@
}
ByteBuffer writeBuffer = ByteBuffer.allocate(INITIAL_MESSAGE_SIZE);
writeBuffer.putLong(jobId.getId());
+ writeBuffer.putLong(resultSetId.getId());
writeBuffer.putInt(partition);
writeBuffer.flip();
if (LOGGER.isLoggable(Level.FINE)) {
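For reference, the initial message the channel now writes is jobId (long), resultSetId (long), partition (int); a standalone sketch of encoding and decoding that 20-byte payload, with illustrative values:

    import java.nio.ByteBuffer;

    public class DatasetHandshakeExample {
        public static void main(String[] args) {
            // Encode: mirrors the putLong/putLong/putInt calls in the channel above.
            ByteBuffer buf = ByteBuffer.allocate(20);
            buf.putLong(42L); // job id
            buf.putLong(7L);  // result set id (the field added by this change)
            buf.putInt(3);    // partition
            buf.flip();

            // Decode: what the receiving side would read, in the same order.
            long jobId = buf.getLong();
            long resultSetId = buf.getLong();
            int partition = buf.getInt();
            System.out.println(jobId + ":" + resultSetId + ":" + partition); // 42:7:3
        }
    }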
diff --git a/hyracks/hyracks-control/hyracks-control-cc/pom.xml b/hyracks/hyracks-control/hyracks-control-cc/pom.xml
index 2af1a85..7782346 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/pom.xml
+++ b/hyracks/hyracks-control/hyracks-control-cc/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -26,7 +26,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
index f5227e5..5294428 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/ClusterControllerService.java
@@ -19,6 +19,7 @@
import java.net.InetSocketAddress;
import java.util.HashMap;
import java.util.LinkedHashMap;
+import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
@@ -40,6 +41,7 @@
import edu.uci.ics.hyracks.api.dataset.DatasetDirectoryRecord;
import edu.uci.ics.hyracks.api.dataset.DatasetJobRecord.Status;
import edu.uci.ics.hyracks.api.dataset.IDatasetDirectoryService;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobStatus;
import edu.uci.ics.hyracks.api.topology.ClusterTopology;
@@ -49,6 +51,8 @@
import edu.uci.ics.hyracks.control.cc.job.JobRun;
import edu.uci.ics.hyracks.control.cc.web.WebServer;
import edu.uci.ics.hyracks.control.cc.work.ApplicationMessageWork;
+import edu.uci.ics.hyracks.control.cc.work.CliDeployBinaryWork;
+import edu.uci.ics.hyracks.control.cc.work.CliUnDeployBinaryWork;
import edu.uci.ics.hyracks.control.cc.work.GetDatasetDirectoryServiceInfoWork;
import edu.uci.ics.hyracks.control.cc.work.GetIpAddressNodeNameMapWork;
import edu.uci.ics.hyracks.control.cc.work.GetJobStatusWork;
@@ -58,6 +62,7 @@
import edu.uci.ics.hyracks.control.cc.work.JobStartWork;
import edu.uci.ics.hyracks.control.cc.work.JobletCleanupNotificationWork;
import edu.uci.ics.hyracks.control.cc.work.NodeHeartbeatWork;
+import edu.uci.ics.hyracks.control.cc.work.NotifyDeployBinaryWork;
import edu.uci.ics.hyracks.control.cc.work.RegisterNodeWork;
import edu.uci.ics.hyracks.control.cc.work.RegisterPartitionAvailibilityWork;
import edu.uci.ics.hyracks.control.cc.work.RegisterPartitionRequestWork;
@@ -73,6 +78,7 @@
import edu.uci.ics.hyracks.control.common.AbstractRemoteService;
import edu.uci.ics.hyracks.control.common.context.ServerContext;
import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentRun;
import edu.uci.ics.hyracks.control.common.ipc.CCNCFunctions;
import edu.uci.ics.hyracks.control.common.ipc.CCNCFunctions.Function;
import edu.uci.ics.hyracks.control.common.logs.LogFile;
@@ -112,6 +118,8 @@
private final Map<JobId, JobRun> runMapArchive;
+ private final Map<JobId, List<Exception>> runMapHistory;
+
private final WorkQueue workQueue;
private final ExecutorService executor;
@@ -126,6 +134,8 @@
private long jobCounter;
+ private final Map<DeploymentId, DeploymentRun> deploymentRunMap;
+
public ClusterControllerService(final CCConfig ccConfig) throws Exception {
this.ccConfig = ccConfig;
File jobLogFolder = new File(ccConfig.ccRoot, "logs/jobs");
@@ -149,6 +159,15 @@
return size() > ccConfig.jobHistorySize;
}
};
+ runMapHistory = new LinkedHashMap<JobId, List<Exception>>() {
+ private static final long serialVersionUID = 1L;
+ /** history size + 1 is for the case when history size = 0 */
+ private int allowedSize = 100 * (ccConfig.jobHistorySize + 1);
+
+ protected boolean removeEldestEntry(Map.Entry<JobId, List<Exception>> eldest) {
+ return size() > allowedSize;
+ }
+ };
workQueue = new WorkQueue();
this.timer = new Timer(true);
final ClusterTopology topology = computeClusterTopology(ccConfig);
@@ -172,6 +191,8 @@
sweeper = new DeadNodeSweeper();
datasetDirectoryService = new DatasetDirectoryService(ccConfig.jobHistorySize);
jobCounter = 0;
+
+ deploymentRunMap = new HashMap<DeploymentId, DeploymentRun>();
}
private static ClusterTopology computeClusterTopology(CCConfig ccConfig) throws Exception {
@@ -243,6 +264,10 @@
return runMapArchive;
}
+ public Map<JobId, List<Exception>> getRunHistory() {
+ return runMapHistory;
+ }
+
public Map<String, Set<String>> getIpAddressNodeNameMap() {
return ipAddressNodeNameMap;
}
@@ -326,8 +351,8 @@
case START_JOB: {
HyracksClientInterfaceFunctions.StartJobFunction sjf = (HyracksClientInterfaceFunctions.StartJobFunction) fn;
JobId jobId = createJobId();
- workQueue.schedule(new JobStartWork(ClusterControllerService.this, sjf.getACGGFBytes(), sjf
- .getJobFlags(), jobId, new IPCResponder<JobId>(handle, mid)));
+ workQueue.schedule(new JobStartWork(ClusterControllerService.this, sjf.getDeploymentId(), sjf
+ .getACGGFBytes(), sjf.getJobFlags(), jobId, new IPCResponder<JobId>(handle, mid)));
return;
}
@@ -373,6 +398,20 @@
}
return;
}
+
+ case CLI_DEPLOY_BINARY: {
+ HyracksClientInterfaceFunctions.CliDeployBinaryFunction dbf = (HyracksClientInterfaceFunctions.CliDeployBinaryFunction) fn;
+ workQueue.schedule(new CliDeployBinaryWork(ClusterControllerService.this, dbf.getBinaryURLs(), dbf
+ .getDeploymentId(), new IPCResponder<DeploymentId>(handle, mid)));
+ return;
+ }
+
+ case CLI_UNDEPLOY_BINARY: {
+ HyracksClientInterfaceFunctions.CliUnDeployBinaryFunction udbf = (HyracksClientInterfaceFunctions.CliUnDeployBinaryFunction) fn;
+ workQueue.schedule(new CliUnDeployBinaryWork(ClusterControllerService.this, udbf.getDeploymentId(),
+ new IPCResponder<DeploymentId>(handle, mid)));
+ return;
+ }
}
try {
handle.send(mid, null, new IllegalArgumentException("Unknown function " + fn.getFunctionId()));
@@ -414,6 +453,13 @@
return;
}
+ case NOTIFY_DEPLOY_BINARY: {
+ CCNCFunctions.NotifyDeployBinaryFunction ndbf = (CCNCFunctions.NotifyDeployBinaryFunction) fn;
+ workQueue.schedule(new NotifyDeployBinaryWork(ClusterControllerService.this,
+ ndbf.getDeploymentId(), ndbf.getNodeId(), ndbf.getDeploymentStatus()));
+ return;
+ }
+
case REPORT_PROFILE: {
CCNCFunctions.ReportProfileFunction rpf = (CCNCFunctions.ReportProfileFunction) fn;
workQueue.schedule(new ReportProfilesWork(ClusterControllerService.this, rpf.getProfiles()));
@@ -429,7 +475,7 @@
case NOTIFY_TASK_FAILURE: {
CCNCFunctions.NotifyTaskFailureFunction ntff = (CCNCFunctions.NotifyTaskFailureFunction) fn;
workQueue.schedule(new TaskFailureWork(ClusterControllerService.this, ntff.getJobId(), ntff
- .getTaskId(), ntff.getDetails(), ntff.getDetails(), ntff.getCaughtExceptions()));
+ .getTaskId(), ntff.getNodeId(), ntff.getExceptions()));
return;
}
@@ -472,7 +518,7 @@
case SEND_APPLICATION_MESSAGE: {
CCNCFunctions.SendApplicationMessageFunction rsf = (CCNCFunctions.SendApplicationMessageFunction) fn;
workQueue.schedule(new ApplicationMessageWork(ClusterControllerService.this, rsf.getMessage(), rsf
- .getNodeId()));
+ .getDeploymentId(), rsf.getNodeId()));
return;
}
@@ -496,4 +542,32 @@
LOGGER.warning("Unknown function: " + fn.getFunctionId());
}
}
-}
\ No newline at end of file
+
+ /**
+ * Add a deployment run
+ *
+ * @param deploymentKey
+ * @param dRun
+ */
+ public synchronized void addDeploymentRun(DeploymentId deploymentKey, DeploymentRun dRun) {
+ deploymentRunMap.put(deploymentKey, dRun);
+ }
+
+ /**
+ * Get a deployment run
+ *
+ * @param deploymentKey
+ */
+ public synchronized DeploymentRun getDeploymentRun(DeploymentId deploymentKey) {
+ return deploymentRunMap.get(deploymentKey);
+ }
+
+ /**
+ * Remove a deployment run
+ *
+ * @param deploymentKey
+ */
+ public synchronized void removeDeploymentRun(DeploymentId deploymentKey) {
+ deploymentRunMap.remove(deploymentKey);
+ }
+}
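A standalone sketch of the eviction policy runMapHistory uses above: LinkedHashMap#removeEldestEntry bounds the map's size while keeping insertion order, and the 100 * (jobHistorySize + 1) bound mirrors the formula in this change (the + 1 covers jobHistorySize == 0).

    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class BoundedHistoryExample {
        public static void main(String[] args) {
            final int jobHistorySize = 0;
            final int allowedSize = 100 * (jobHistorySize + 1);
            Map<Integer, List<Exception>> history = new LinkedHashMap<Integer, List<Exception>>() {
                private static final long serialVersionUID = 1L;

                @Override
                protected boolean removeEldestEntry(Map.Entry<Integer, List<Exception>> eldest) {
                    // Evict the oldest entry once the bound is exceeded.
                    return size() > allowedSize;
                }
            };
            for (int i = 0; i < 1000; i++) {
                history.put(i, null);
            }
            System.out.println(history.size()); // 100: eldest entries were evicted
        }
    }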
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/application/CCApplicationContext.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/application/CCApplicationContext.java
index 7e1581a..f884c6b 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/application/CCApplicationContext.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/application/CCApplicationContext.java
@@ -19,9 +19,11 @@
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
+import java.util.Map;
import java.util.Set;
import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
+import edu.uci.ics.hyracks.api.application.IClusterLifecycleListener;
import edu.uci.ics.hyracks.api.context.ICCContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
@@ -41,6 +43,7 @@
protected IResultCallback<Object> deinitializationCallback;
private List<IJobLifecycleListener> jobLifecycleListeners;
+ private List<IClusterLifecycleListener> clusterLifecycleListeners;
public CCApplicationContext(ServerContext serverCtx, ICCContext ccContext) throws IOException {
super(serverCtx);
@@ -48,6 +51,7 @@
initPendingNodeIds = new HashSet<String>();
deinitPendingNodeIds = new HashSet<String>();
jobLifecycleListeners = new ArrayList<IJobLifecycleListener>();
+ clusterLifecycleListeners = new ArrayList<IClusterLifecycleListener>();
}
public ICCContext getCCContext() {
@@ -82,4 +86,21 @@
l.notifyJobCreation(jobId, acggf);
}
}
+
+ @Override
+ public void addClusterLifecycleListener(IClusterLifecycleListener clusterLifecycleListener) {
+ clusterLifecycleListeners.add(clusterLifecycleListener);
+ }
+
+ public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) throws HyracksException {
+ for (IClusterLifecycleListener l : clusterLifecycleListeners) {
+ l.notifyNodeJoin(nodeId, ncConfiguration);
+ }
+ }
+
+ public void notifyNodeFailure(Set<String> deadNodeIds) {
+ for (IClusterLifecycleListener l : clusterLifecycleListeners) {
+ l.notifyNodeFailure(deadNodeIds);
+ }
+ }
}
\ No newline at end of file
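An illustrative listener for the new addClusterLifecycleListener hook above; the callback signatures are inferred from the notifyNodeJoin/notifyNodeFailure dispatch loops, and the class name is invented for this sketch.

    import java.util.Map;
    import java.util.Set;

    import edu.uci.ics.hyracks.api.application.IClusterLifecycleListener;

    public class LoggingClusterLifecycleListener implements IClusterLifecycleListener {
        @Override
        public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) {
            System.out.println("node joined: " + nodeId + " config: " + ncConfiguration);
        }

        @Override
        public void notifyNodeFailure(Set<String> deadNodeIds) {
            System.out.println("nodes failed: " + deadNodeIds);
        }
    }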
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/dataset/DatasetDirectoryService.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/dataset/DatasetDirectoryService.java
index 03bd96d..36082b0 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/dataset/DatasetDirectoryService.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/dataset/DatasetDirectoryService.java
@@ -101,7 +101,7 @@
records[partition].writeEOS();
for (DatasetDirectoryRecord record : records) {
- if (record.getStatus() == DatasetDirectoryRecord.Status.SUCCESS) {
+ if ((record != null) && (record.getStatus() == DatasetDirectoryRecord.Status.SUCCESS)) {
successCount++;
}
}
@@ -114,15 +114,17 @@
@Override
public synchronized void reportResultPartitionFailure(JobId jobId, ResultSetId rsId, int partition) {
DatasetJobRecord djr = jobResultLocations.get(jobId);
- djr.fail();
+ if (djr != null) {
+ djr.fail();
+ }
notifyAll();
}
@Override
- public synchronized void reportJobFailure(JobId jobId, List<Throwable> caughtExceptions) {
+ public synchronized void reportJobFailure(JobId jobId, List<Exception> exceptions) {
DatasetJobRecord djr = jobResultLocations.get(jobId);
if (djr != null) {
- djr.fail(caughtExceptions);
+ djr.fail(exceptions);
}
notifyAll();
}
@@ -196,7 +198,7 @@
}
if (djr.getStatus() == Status.FAILED) {
- List<Throwable> caughtExceptions = djr.getCaughtExceptions();
+ List<Exception> caughtExceptions = djr.getExceptions();
if (caughtExceptions == null) {
throw new HyracksDataException("Job failed.");
} else {
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/JobRun.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/JobRun.java
index d4e06a8..3a6dc26 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/JobRun.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/JobRun.java
@@ -29,7 +29,7 @@
import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
import edu.uci.ics.hyracks.api.dataflow.TaskId;
import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.job.ActivityCluster;
import edu.uci.ics.hyracks.api.job.ActivityClusterGraph;
@@ -46,6 +46,8 @@
import edu.uci.ics.hyracks.control.common.job.profiling.om.JobProfile;
public class JobRun implements IJobStatusConditionVariable {
+ private final DeploymentId deploymentId;
+
private final JobId jobId;
private final IActivityClusterGraphGenerator acgg;
@@ -76,14 +78,15 @@
private JobStatus status;
- private Exception exception;
+ private List<Exception> exceptions;
private JobStatus pendingStatus;
- private Exception pendingException;
+ private List<Exception> pendingExceptions;
- public JobRun(ClusterControllerService ccs, JobId jobId, IActivityClusterGraphGenerator acgg,
- EnumSet<JobFlag> jobFlags) {
+ public JobRun(ClusterControllerService ccs, DeploymentId deploymentId, JobId jobId,
+ IActivityClusterGraphGenerator acgg, EnumSet<JobFlag> jobFlags) {
+ this.deploymentId = deploymentId;
this.jobId = jobId;
this.acgg = acgg;
this.acg = acgg.initialize();
@@ -97,6 +100,10 @@
connectorPolicyMap = new HashMap<ConnectorDescriptorId, IConnectorPolicy>();
}
+ public DeploymentId getDeploymentId() {
+ return deploymentId;
+ }
+
public JobId getJobId() {
return jobId;
}
@@ -117,9 +124,9 @@
return pmm;
}
- public synchronized void setStatus(JobStatus status, Exception exception) {
+ public synchronized void setStatus(JobStatus status, List<Exception> exceptions) {
this.status = status;
- this.exception = exception;
+ this.exceptions = exceptions;
notifyAll();
}
@@ -127,21 +134,21 @@
return status;
}
- public synchronized Exception getException() {
- return exception;
+ public synchronized List<Exception> getExceptions() {
+ return exceptions;
}
- public void setPendingStatus(JobStatus status, Exception exception) {
+ public void setPendingStatus(JobStatus status, List<Exception> exceptions) {
this.pendingStatus = status;
- this.pendingException = exception;
+ this.pendingExceptions = exceptions;
}
public JobStatus getPendingStatus() {
return pendingStatus;
}
- public synchronized Exception getPendingException() {
- return pendingException;
+ public synchronized List<Exception> getPendingExceptions() {
+ return pendingExceptions;
}
public long getCreateTime() {
@@ -173,12 +180,18 @@
while (status != JobStatus.TERMINATED && status != JobStatus.FAILURE) {
wait();
}
- if (exception != null) {
- if (exception instanceof HyracksDataException) {
- throw exception;
- } else {
- throw new HyracksException(exception);
+ if (exceptions != null && !exceptions.isEmpty()) {
+ StringBuilder buffer = new StringBuilder();
+ buffer.append("Job failed on account of:\n");
+ for (Exception e : exceptions) {
+ buffer.append(e.getMessage()).append('\n');
}
+ HyracksException he = new HyracksException(buffer.toString(), exceptions.get(0));
+ for (int i = 1; i < exceptions.size(); ++i) {
+ he.addSuppressed(exceptions.get(i));
+ }
+ throw he;
}
}
@@ -330,9 +343,9 @@
taskAttempt.put("node-id", ta.getNodeId());
taskAttempt.put("start-time", ta.getStartTime());
taskAttempt.put("end-time", ta.getEndTime());
- String failureDetails = ta.getFailureDetails();
- if (failureDetails != null) {
- taskAttempt.put("failure-details", failureDetails);
+ List<Exception> exceptions = ta.getExceptions();
+ if (exceptions != null && !exceptions.isEmpty()) {
+ taskAttempt.put("failure-details", exceptions);
}
taskAttempts.put(taskAttempt);
}
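The failure-reporting pattern adopted above, in a self-contained form: the first exception becomes the cause, the rest are attached via addSuppressed (Java 7+), so no failure is silently dropped. Plain Exception stands in for HyracksException here.

    import java.util.Arrays;
    import java.util.List;

    public class ExceptionAggregationExample {
        public static void main(String[] args) {
            List<Exception> exceptions = Arrays.asList(new Exception("node A failed"),
                    new Exception("node B failed"));

            StringBuilder buffer = new StringBuilder("Job failed on account of:\n");
            for (Exception e : exceptions) {
                buffer.append(e.getMessage()).append('\n');
            }
            Exception aggregate = new Exception(buffer.toString(), exceptions.get(0));
            for (int i = 1; i < exceptions.size(); ++i) {
                aggregate.addSuppressed(exceptions.get(i));
            }
            System.out.println(aggregate.getMessage());
            System.out.println(aggregate.getSuppressed().length); // 1
        }
    }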
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskAttempt.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskAttempt.java
index 7c0dd57..b323501 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskAttempt.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/job/TaskAttempt.java
@@ -14,6 +14,8 @@
*/
package edu.uci.ics.hyracks.control.cc.job;
+import java.util.List;
+
import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
public class TaskAttempt {
@@ -35,7 +37,7 @@
private TaskStatus status;
- private String failureDetails;
+ private List<Exception> exceptions;
private long startTime;
@@ -73,13 +75,13 @@
return status;
}
- public String getFailureDetails() {
- return failureDetails;
+ public List<Exception> getExceptions() {
+ return exceptions;
}
- public void setStatus(TaskStatus status, String details) {
+ public void setStatus(TaskStatus status, List<Exception> exceptions) {
this.status = status;
- this.failureDetails = details;
+ this.exceptions = exceptions;
}
public long getStartTime() {
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/JobScheduler.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/JobScheduler.java
index da1683d..d09b641 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/JobScheduler.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/scheduler/JobScheduler.java
@@ -16,6 +16,7 @@
import java.util.ArrayList;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -34,6 +35,7 @@
import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
import edu.uci.ics.hyracks.api.dataflow.TaskId;
import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.job.ActivityCluster;
import edu.uci.ics.hyracks.api.job.ActivityClusterGraph;
@@ -427,6 +429,7 @@
}
private void startTasks(Map<String, List<TaskAttemptDescriptor>> taskAttemptMap) throws HyracksException {
+ final DeploymentId deploymentId = jobRun.getDeploymentId();
final JobId jobId = jobRun.getJobId();
final ActivityClusterGraph acg = jobRun.getActivityClusterGraph();
final Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies = new HashMap<ConnectorDescriptorId, IConnectorPolicy>(
@@ -443,8 +446,8 @@
}
try {
byte[] jagBytes = changed ? JavaSerializationUtils.serialize(acg) : null;
- node.getNodeController().startTasks(jobId, jagBytes, taskDescriptors, connectorPolicies,
- jobRun.getFlags());
+ node.getNodeController().startTasks(deploymentId, jobId, jagBytes, taskDescriptors,
+ connectorPolicies, jobRun.getFlags());
} catch (Exception e) {
e.printStackTrace();
}
@@ -452,13 +455,13 @@
}
}
- private void abortJob(Exception exception) {
+ private void abortJob(List<Exception> exceptions) {
Set<TaskCluster> inProgressTaskClustersCopy = new HashSet<TaskCluster>(inProgressTaskClusters);
for (TaskCluster tc : inProgressTaskClustersCopy) {
abortTaskCluster(findLastTaskClusterAttempt(tc));
}
assert inProgressTaskClusters.isEmpty();
- ccs.getWorkQueue().schedule(new JobCleanupWork(ccs, jobRun.getJobId(), JobStatus.FAILURE, exception));
+ ccs.getWorkQueue().schedule(new JobCleanupWork(ccs, jobRun.getJobId(), JobStatus.FAILURE, exceptions));
}
private void abortTaskCluster(TaskClusterAttempt tcAttempt) {
@@ -596,7 +599,7 @@
* @param details
* - Cause of the failure
*/
- public void notifyTaskFailure(TaskAttempt ta, ActivityCluster ac, String details, List<Throwable> caughtExceptions) {
+ public void notifyTaskFailure(TaskAttempt ta, ActivityCluster ac, List<Exception> exceptions) {
try {
LOGGER.fine("Received failure notification for TaskAttempt " + ta.getTaskAttemptId());
TaskAttemptId taId = ta.getTaskAttemptId();
@@ -604,13 +607,13 @@
TaskClusterAttempt lastAttempt = findLastTaskClusterAttempt(tc);
if (lastAttempt != null && taId.getAttempt() == lastAttempt.getAttempt()) {
LOGGER.fine("Marking TaskAttempt " + ta.getTaskAttemptId() + " as failed");
- ta.setStatus(TaskAttempt.TaskStatus.FAILED, details);
+ ta.setStatus(TaskAttempt.TaskStatus.FAILED, exceptions);
abortTaskCluster(lastAttempt);
lastAttempt.setStatus(TaskClusterAttempt.TaskClusterStatus.FAILED);
lastAttempt.setEndTime(System.currentTimeMillis());
abortDoomedTaskClusters();
if (lastAttempt.getAttempt() >= jobRun.getActivityClusterGraph().getMaxReattempts()) {
- abortJob(new HyracksException(caughtExceptions.get(caughtExceptions.size() - 1)));
+ abortJob(exceptions);
return;
}
startRunnableActivityClusters();
@@ -619,7 +622,7 @@
+ lastAttempt);
}
} catch (Exception e) {
- abortJob(e);
+ abortJob(Collections.singletonList(e));
}
}
@@ -644,7 +647,10 @@
for (TaskAttempt ta : lastTaskClusterAttempt.getTaskAttempts().values()) {
assert (ta.getStatus() == TaskAttempt.TaskStatus.COMPLETED || ta.getStatus() == TaskAttempt.TaskStatus.RUNNING);
if (deadNodes.contains(ta.getNodeId())) {
- ta.setStatus(TaskAttempt.TaskStatus.FAILED, "Node " + ta.getNodeId() + " failed");
+ ta.setStatus(
+ TaskAttempt.TaskStatus.FAILED,
+ Collections.singletonList(new Exception("Node " + ta.getNodeId()
+ + " failed")));
ta.setEndTime(System.currentTimeMillis());
abort = true;
}
@@ -659,7 +665,7 @@
}
startRunnableActivityClusters();
} catch (Exception e) {
- abortJob(e);
+ abortJob(Collections.singletonList(e));
}
}
}
\ No newline at end of file
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/ApplicationInstallationHandler.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/ApplicationInstallationHandler.java
new file mode 100755
index 0000000..35165de
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/ApplicationInstallationHandler.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.control.cc.web;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.eclipse.jetty.http.HttpMethods;
+import org.eclipse.jetty.server.Request;
+import org.eclipse.jetty.server.handler.AbstractHandler;
+
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.work.SynchronizableWork;
+
+public class ApplicationInstallationHandler extends AbstractHandler {
+ private ClusterControllerService ccs;
+
+ public ApplicationInstallationHandler(ClusterControllerService ccs) {
+ this.ccs = ccs;
+ }
+
+ @Override
+ public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response)
+ throws IOException, ServletException {
+ try {
+ while (target.startsWith("/")) {
+ target = target.substring(1);
+ }
+ while (target.endsWith("/")) {
+ target = target.substring(0, target.length() - 1);
+ }
+ String[] parts = target.split("/");
+ if (parts.length != 1) {
+ return;
+ }
+ final String[] params = parts[0].split("&");
+ String deployIdString = params[0];
+ String rootDir = ccs.getServerContext().getBaseDir().toString();
+ final String deploymentDir = rootDir.endsWith(File.separator) ? rootDir + "applications" + File.separator + deployIdString
+ : rootDir + File.separator + "applications" + File.separator + deployIdString;
+ if (HttpMethods.PUT.equals(request.getMethod())) {
+ class OutputStreamGetter extends SynchronizableWork {
+ private OutputStream os;
+
+ @Override
+ protected void doRun() throws Exception {
+ FileUtils.forceMkdir(new File(deploymentDir));
+ String fileName = params[1];
+ File jarFile = new File(deploymentDir, fileName);
+ os = new FileOutputStream(jarFile);
+ }
+ }
+ OutputStreamGetter r = new OutputStreamGetter();
+ try {
+ ccs.getWorkQueue().scheduleAndSync(r);
+ } catch (Exception e) {
+ throw new IOException(e);
+ }
+ try {
+ IOUtils.copyLarge(request.getInputStream(), r.os);
+ } finally {
+ r.os.close();
+ }
+ } else if (HttpMethods.GET.equals(request.getMethod())) {
+ class InputStreamGetter extends SynchronizableWork {
+ private InputStream is;
+
+ @Override
+ protected void doRun() throws Exception {
+ String fileName = params[1];
+ File jarFile = new File(deploymentDir, fileName);
+ is = new FileInputStream(jarFile);
+ }
+ }
+ InputStreamGetter r = new InputStreamGetter();
+ try {
+ ccs.getWorkQueue().scheduleAndSync(r);
+ } catch (Exception e) {
+ throw new IOException(e);
+ }
+ if (r.is == null) {
+ response.setStatus(HttpServletResponse.SC_NOT_FOUND);
+ } else {
+ response.setContentType("application/octet-stream");
+ response.setStatus(HttpServletResponse.SC_OK);
+ try {
+ IOUtils.copyLarge(r.is, response.getOutputStream());
+ } finally {
+ r.is.close();
+ }
+ }
+ }
+ baseRequest.setHandled(true);
+ } catch (IOException e) {
+ e.printStackTrace();
+ throw e;
+ }
+ }
+}
\ No newline at end of file
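A hedged client-side sketch for the handler above; the /applications/<deploymentId>&<fileName> URL shape follows the target parsing in handle(), while the host, port, and jar path are placeholders.

    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class JarUploadExample {
        public static void main(String[] args) throws Exception {
            // Placeholder host/port; point this at the CC web server.
            URL url = new URL("http://localhost:16001/applications/app-1&app-1.jar");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("PUT");
            conn.setDoOutput(true);

            InputStream in = new FileInputStream("app-1.jar");
            OutputStream out = conn.getOutputStream();
            try {
                byte[] buf = new byte[8192];
                int n;
                while ((n = in.read(buf)) != -1) {
                    out.write(buf, 0, n);
                }
            } finally {
                out.close();
                in.close();
            }
            System.out.println("HTTP " + conn.getResponseCode());
        }
    }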
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/WebServer.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/WebServer.java
index e39e766..cd7a684 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/WebServer.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/web/WebServer.java
@@ -70,6 +70,11 @@
addHandler(createAdminConsoleHandler());
addHandler(createStaticResourcesHandler());
+
+ /** the service of uploading/downloading deployment jars */
+ handler = new ContextHandler("/applications");
+ handler.setHandler(new ApplicationInstallationHandler(ccs));
+ addHandler(handler);
}
private Handler createAdminConsoleHandler() {
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationMessageWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationMessageWork.java
index b880c8a..50b3e30 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationMessageWork.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/ApplicationMessageWork.java
@@ -14,14 +14,14 @@
*/
package edu.uci.ics.hyracks.control.cc.work;
-import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import edu.uci.ics.hyracks.api.application.ICCApplicationContext;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.messages.IMessage;
-import edu.uci.ics.hyracks.api.util.JavaSerializationUtils;
import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentUtils;
import edu.uci.ics.hyracks.control.common.work.AbstractWork;
/**
@@ -31,11 +31,13 @@
private static final Logger LOGGER = Logger.getLogger(ApplicationMessageWork.class.getName());
private byte[] message;
+ private DeploymentId deploymentId;
private String nodeId;
private ClusterControllerService ccs;
- public ApplicationMessageWork(ClusterControllerService ccs, byte[] message, String nodeId) {
+ public ApplicationMessageWork(ClusterControllerService ccs, byte[] message, DeploymentId deploymentId, String nodeId) {
this.ccs = ccs;
+ this.deploymentId = deploymentId;
this.nodeId = nodeId;
this.message = message;
}
@@ -44,17 +46,16 @@
public void run() {
final ICCApplicationContext ctx = ccs.getApplicationContext();
try {
- final IMessage data = (IMessage) JavaSerializationUtils.deserialize(message);
+ final IMessage data = (IMessage) DeploymentUtils.deserialize(message, deploymentId, ctx);
ccs.getExecutor().execute(new Runnable() {
@Override
public void run() {
ctx.getMessageBroker().receivedMessage(data, nodeId);
}
});
- } catch (IOException e) {
+ } catch (Exception e) {
LOGGER.log(Level.WARNING, "Error in stats reporting", e);
- } catch (ClassNotFoundException e) {
- Logger.getLogger(this.getClass().getName()).log(Level.WARNING, "Error in stats reporting", e);
+ throw new RuntimeException(e);
}
}
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/CliDeployBinaryWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/CliDeployBinaryWork.java
new file mode 100644
index 0000000..1a889f8
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/CliDeployBinaryWork.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.control.cc.work;
+
+import java.net.URL;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.UUID;
+
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.cc.NodeControllerState;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentRun;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentUtils;
+import edu.uci.ics.hyracks.control.common.work.IPCResponder;
+import edu.uci.ics.hyracks.control.common.work.SynchronizableWork;
+
+/**
+ * This is the work that happens on the CC for a dynamic deployment.
+ * It first deploys the jars to the CC application context.
+ * Then, it remotely calls each NC service to deploy the jars listed as HTTP URLs.
+ * NOTE: in the current implementation, a user cannot run concurrent deployments with the same deployment id.
+ *
+ * @author yingyib
+ */
+public class CliDeployBinaryWork extends SynchronizableWork {
+
+ private ClusterControllerService ccs;
+ private List<URL> binaryURLs;
+ private DeploymentId deploymentId;
+ private IPCResponder<DeploymentId> callback;
+
+ public CliDeployBinaryWork(ClusterControllerService ccs, List<URL> binaryURLs, DeploymentId deploymentId,
+ IPCResponder<DeploymentId> callback) {
+ this.ccs = ccs;
+ this.binaryURLs = binaryURLs;
+ this.deploymentId = deploymentId;
+ this.callback = callback;
+ }
+
+ @Override
+ public void doRun() {
+ try {
+ if (deploymentId == null) {
+ deploymentId = new DeploymentId(UUID.randomUUID().toString());
+ }
+ /**
+ * Deploy the binaries to the cluster controller
+ */
+ DeploymentUtils.deploy(deploymentId, binaryURLs, ccs.getApplicationContext()
+ .getJobSerializerDeserializerContainer(), ccs.getServerContext(), false);
+
+ /**
+ * Deploy the binaries to the node controllers
+ */
+ Map<String, NodeControllerState> nodeControllerStateMap = ccs.getNodeMap();
+
+ Set<String> nodeIds = new TreeSet<String>(nodeControllerStateMap.keySet());
+ final DeploymentRun dRun = new DeploymentRun(nodeIds);
+
+ /** The following call prevents concurrent deployments with the same deployment id. */
+ ccs.addDeploymentRun(deploymentId, dRun);
+
+ /**
+ * Deploy binaries to each node controller
+ */
+ for (NodeControllerState ncs : nodeControllerStateMap.values()) {
+ ncs.getNodeController().deployBinary(deploymentId, binaryURLs);
+ }
+
+ ccs.getExecutor().execute(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ /**
+ * wait for completion
+ */
+ dRun.waitForCompletion();
+ ccs.removeDeploymentRun(deploymentId);
+ callback.setValue(deploymentId);
+ } catch (Exception e) {
+ callback.setException(e);
+ }
+ }
+ });
+ } catch (Exception e) {
+ callback.setException(e);
+ }
+ }
+}
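DeploymentRun itself lives in hyracks-control-common and is not shown in this hunk; a hypothetical sketch of the rendezvous it provides follows. Only waitForCompletion is confirmed by the call above; the constructor shape matches new DeploymentRun(nodeIds), and the other names are invented for illustration.

    import java.util.HashSet;
    import java.util.Set;

    public class DeploymentRunSketch {
        private final Set<String> pendingNodeIds;

        public DeploymentRunSketch(Set<String> nodeIds) {
            this.pendingNodeIds = new HashSet<String>(nodeIds);
        }

        // Called once per NC acknowledgement (e.g. from NotifyDeployBinaryWork).
        public synchronized void notifyNodeComplete(String nodeId) {
            pendingNodeIds.remove(nodeId);
            notifyAll();
        }

        // The CC blocks here until every NC in the initial set has reported.
        public synchronized void waitForCompletion() throws InterruptedException {
            while (!pendingNodeIds.isEmpty()) {
                wait();
            }
        }
    }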
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/CliUnDeployBinaryWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/CliUnDeployBinaryWork.java
new file mode 100644
index 0000000..d54444c
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/CliUnDeployBinaryWork.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.control.cc.work;
+
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.UUID;
+
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.cc.NodeControllerState;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentRun;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentUtils;
+import edu.uci.ics.hyracks.control.common.work.IPCResponder;
+import edu.uci.ics.hyracks.control.common.work.SynchronizableWork;
+
+/**
+ * This is the work that happens on the CC for an undeployment.
+ * It first undeploys the jars from the CC application context by deployment id.
+ * Then, it remotely calls each NC service to undeploy the same deployment id.
+ * NOTE: in the current implementation, a user cannot run concurrent undeployments with the same deployment id.
+ *
+ * @author yingyib
+ */
+public class CliUnDeployBinaryWork extends SynchronizableWork {
+
+ private ClusterControllerService ccs;
+ private DeploymentId deploymentId;
+ private IPCResponder<DeploymentId> callback;
+
+ public CliUnDeployBinaryWork(ClusterControllerService ccs, DeploymentId deploymentId,
+ IPCResponder<DeploymentId> callback) {
+ this.ccs = ccs;
+ this.deploymentId = deploymentId;
+ this.callback = callback;
+ }
+
+ @Override
+ public void doRun() {
+ try {
+ if (deploymentId == null) {
+ deploymentId = new DeploymentId(UUID.randomUUID().toString());
+ }
+ /**
+ * Undeploy from the cluster controller
+ */
+ DeploymentUtils.undeploy(deploymentId, ccs.getApplicationContext().getJobSerializerDeserializerContainer(),
+ ccs.getServerContext());
+
+ /**
+ * Undeploy from the node controllers
+ */
+ Map<String, NodeControllerState> nodeControllerStateMap = ccs.getNodeMap();
+
+ Set<String> nodeIds = new TreeSet<String>(nodeControllerStateMap.keySet());
+ final DeploymentRun dRun = new DeploymentRun(nodeIds);
+
+ /** The following call prevents concurrent undeployments with the same deployment id. */
+ ccs.addDeploymentRun(deploymentId, dRun);
+
+ /**
+ * Undeploy binaries from each node controller
+ */
+ for (NodeControllerState ncs : nodeControllerStateMap.values()) {
+ ncs.getNodeController().undeployBinary(deploymentId);
+ }
+
+ ccs.getExecutor().execute(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ /**
+ * wait for completion
+ */
+ dRun.waitForCompletion();
+ ccs.removeDeploymentRun(deploymentId);
+ callback.setValue(null);
+ } catch (Exception e) {
+ callback.setException(e);
+ }
+ }
+ });
+ } catch (Exception e) {
+ callback.setException(e);
+ }
+ }
+}
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobCleanupWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobCleanupWork.java
index b304b21..ab218cc 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobCleanupWork.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobCleanupWork.java
@@ -14,6 +14,7 @@
*/
package edu.uci.ics.hyracks.control.cc.work;
+import java.util.List;
import java.util.Set;
import java.util.logging.Logger;
@@ -36,13 +37,13 @@
private ClusterControllerService ccs;
private JobId jobId;
private JobStatus status;
- private Exception exception;
+ private List<Exception> exceptions;
- public JobCleanupWork(ClusterControllerService ccs, JobId jobId, JobStatus status, Exception exception) {
+ public JobCleanupWork(ClusterControllerService ccs, JobId jobId, JobStatus status, List<Exception> exceptions) {
this.ccs = ccs;
this.jobId = jobId;
this.status = status;
- this.exception = exception;
+ this.exceptions = exceptions;
}
@Override
@@ -58,7 +59,7 @@
}
Set<String> targetNodes = run.getParticipatingNodeIds();
run.getCleanupPendingNodeIds().addAll(targetNodes);
- run.setPendingStatus(status, exception);
+ run.setPendingStatus(status, exceptions);
if (targetNodes != null && !targetNodes.isEmpty()) {
for (String n : targetNodes) {
NodeControllerState ncs = ccs.getNodeMap().get(n);
@@ -77,9 +78,10 @@
e.printStackTrace();
}
}
- run.setStatus(run.getPendingStatus(), run.getPendingException());
+ run.setStatus(run.getPendingStatus(), run.getPendingExceptions());
ccs.getActiveRunMap().remove(jobId);
ccs.getRunMapArchive().put(jobId, run);
+ ccs.getRunHistory().put(jobId, run.getExceptions());
try {
ccs.getJobLogFile().log(createJobLogObject(run));
} catch (Exception e) {
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobStartWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobStartWork.java
index f7d6c4c..0e739ba 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobStartWork.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobStartWork.java
@@ -14,17 +14,19 @@
*/
package edu.uci.ics.hyracks.control.cc.work;
+import java.util.Collections;
import java.util.EnumSet;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGenerator;
import edu.uci.ics.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
import edu.uci.ics.hyracks.api.job.JobFlag;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobStatus;
-import edu.uci.ics.hyracks.api.util.JavaSerializationUtils;
import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
import edu.uci.ics.hyracks.control.cc.application.CCApplicationContext;
import edu.uci.ics.hyracks.control.cc.job.JobRun;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentUtils;
import edu.uci.ics.hyracks.control.common.work.IResultCallback;
import edu.uci.ics.hyracks.control.common.work.SynchronizableWork;
@@ -32,11 +34,13 @@
private final ClusterControllerService ccs;
private final byte[] acggfBytes;
private final EnumSet<JobFlag> jobFlags;
+ private final DeploymentId deploymentId;
private final JobId jobId;
private final IResultCallback<JobId> callback;
- public JobStartWork(ClusterControllerService ccs, byte[] acggfBytes, EnumSet<JobFlag> jobFlags, JobId jobId,
- IResultCallback<JobId> callback) {
+ public JobStartWork(ClusterControllerService ccs, DeploymentId deploymentId, byte[] acggfBytes,
+ EnumSet<JobFlag> jobFlags, JobId jobId, IResultCallback<JobId> callback) {
+ this.deploymentId = deploymentId;
this.jobId = jobId;
this.ccs = ccs;
this.acggfBytes = acggfBytes;
@@ -48,10 +52,10 @@
protected void doRun() throws Exception {
try {
final CCApplicationContext appCtx = ccs.getApplicationContext();
- IActivityClusterGraphGeneratorFactory acggf = (IActivityClusterGraphGeneratorFactory) JavaSerializationUtils
- .deserialize(acggfBytes);
+ IActivityClusterGraphGeneratorFactory acggf = (IActivityClusterGraphGeneratorFactory) DeploymentUtils
+ .deserialize(acggfBytes, deploymentId, appCtx);
IActivityClusterGraphGenerator acgg = acggf.createActivityClusterGraphGenerator(jobId, appCtx, jobFlags);
- JobRun run = new JobRun(ccs, jobId, acgg, jobFlags);
+ JobRun run = new JobRun(ccs, deploymentId, jobId, acgg, jobFlags);
run.setStatus(JobStatus.INITIALIZED, null);
ccs.getActiveRunMap().put(jobId, run);
appCtx.notifyJobCreation(jobId, acggf);
@@ -59,7 +63,8 @@
try {
run.getScheduler().startJob();
} catch (Exception e) {
- ccs.getWorkQueue().schedule(new JobCleanupWork(ccs, run.getJobId(), JobStatus.FAILURE, e));
+ ccs.getWorkQueue().schedule(
+ new JobCleanupWork(ccs, run.getJobId(), JobStatus.FAILURE, Collections.singletonList(e)));
}
callback.setValue(jobId);
} catch (Exception e) {
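Note the switch from JavaSerializationUtils to DeploymentUtils.deserialize: the activity cluster graph generator factory is now deserialized against the deployment's classloader. A minimal hypothetical helper showing the underlying mechanism, a classloader-aware ObjectInputStream:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectStreamClass;

// Minimal sketch (ScopedDeserializer is a made-up name): resolve classes against a
// deployment-scoped classloader when one exists, else fall back to the default.
public final class ScopedDeserializer {
    public static Object deserialize(byte[] bytes, final ClassLoader loader) throws Exception {
        ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes)) {
            @Override
            protected Class<?> resolveClass(ObjectStreamClass desc) throws IOException,
                    ClassNotFoundException {
                ClassLoader cl = loader != null ? loader : getClass().getClassLoader();
                return Class.forName(desc.getName(), true, cl);
            }
        };
        try {
            return ois.readObject();
        } finally {
            ois.close();
        }
    }
}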
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobletCleanupNotificationWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobletCleanupNotificationWork.java
index ed58c43..63e62a0 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobletCleanupNotificationWork.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/JobletCleanupNotificationWork.java
@@ -67,9 +67,10 @@
e.printStackTrace();
}
}
- run.setStatus(run.getPendingStatus(), run.getPendingException());
+ run.setStatus(run.getPendingStatus(), run.getPendingExceptions());
ccs.getActiveRunMap().remove(jobId);
ccs.getRunMapArchive().put(jobId, run);
+ ccs.getRunHistory().put(jobId, run.getExceptions());
try {
ccs.getJobLogFile().log(createJobLogObject(run));
} catch (Exception e) {
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/NotifyDeployBinaryWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/NotifyDeployBinaryWork.java
new file mode 100644
index 0000000..c4c8873
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/NotifyDeployBinaryWork.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.control.cc.work;
+
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
+import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentRun;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentStatus;
+import edu.uci.ics.hyracks.control.common.work.AbstractWork;
+
+/**
+ * This is the work that runs on the CC when it receives a deployment or undeployment status notification from an NC.
+ *
+ * @author yingyib
+ */
+public class NotifyDeployBinaryWork extends AbstractWork {
+
+ private final ClusterControllerService ccs;
+ private final String nodeId;
+ private final DeploymentId deploymentId;
+ private DeploymentStatus deploymentStatus;
+
+ public NotifyDeployBinaryWork(ClusterControllerService ccs, DeploymentId deploymentId, String nodeId,
+ DeploymentStatus deploymentStatus) {
+ this.ccs = ccs;
+ this.nodeId = nodeId;
+ this.deploymentId = deploymentId;
+ this.deploymentStatus = deploymentStatus;
+
+ }
+
+ @Override
+ public void run() {
+ /** triggered remotely by an NC to report the status of a deployment or undeployment on that NC */
+ DeploymentRun dRun = ccs.getDeploymentRun(deploymentId);
+ dRun.notifyDeploymentStatus(nodeId, deploymentStatus);
+ }
+
+}
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterNodeWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterNodeWork.java
index f7dd1d2..03d43ed 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterNodeWork.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RegisterNodeWork.java
@@ -14,6 +14,7 @@
*/
package edu.uci.ics.hyracks.control.cc.work;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
@@ -47,6 +48,7 @@
IIPCHandle ncIPCHandle = ccs.getClusterIPC().getHandle(reg.getNodeControllerAddress());
CCNCFunctions.NodeRegistrationResult result = null;
+ Map<String, String> ncConfiguration = null;
try {
INodeController nodeController = new NodeControllerRemoteProxy(ncIPCHandle);
@@ -58,6 +60,8 @@
nodeMap.put(id, state);
Map<String, Set<String>> ipAddressNodeNameMap = ccs.getIpAddressNodeNameMap();
String ipAddress = state.getNCConfig().dataIPAddress;
+ ncConfiguration = new HashMap<String, String>();
+ state.getNCConfig().toMap(ncConfiguration);
Set<String> nodes = ipAddressNodeNameMap.get(ipAddress);
if (nodes == null) {
nodes = new HashSet<String>();
@@ -75,5 +79,6 @@
result = new CCNCFunctions.NodeRegistrationResult(null, e);
}
ncIPCHandle.send(-1, result, null);
+ ccs.getApplicationContext().notifyNodeJoin(id, ncConfiguration);
}
}
\ No newline at end of file
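RegisterNodeWork now snapshots the NC's configuration into a map and forwards it to notifyNodeJoin. A sketch of what an application-level listener could do with that map; the class name is hypothetical, and the keys come from NCConfig.toMap further below:

import java.util.Map;

// Hypothetical listener sketch: consuming the configuration map that RegisterNodeWork
// now passes to notifyNodeJoin.
public class LoggingClusterLifecycleListener {
    public void notifyNodeJoin(String nodeId, Map<String, String> ncConfiguration) {
        System.out.println("node " + nodeId + " joined; data ip = "
                + (ncConfiguration == null ? "?" : ncConfiguration.get("data-ip-address")));
    }
}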
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RemoveDeadNodesWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RemoveDeadNodesWork.java
index 7255503..c82e264 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RemoveDeadNodesWork.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/RemoveDeadNodesWork.java
@@ -65,6 +65,9 @@
}
}
}
+ if (deadNodes != null && deadNodes.size() > 0) {
+ ccs.getApplicationContext().notifyNodeFailure(deadNodes);
+ }
}
@Override
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/TaskFailureWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/TaskFailureWork.java
index bf5f314..6a04487 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/TaskFailureWork.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/TaskFailureWork.java
@@ -24,22 +24,20 @@
import edu.uci.ics.hyracks.control.cc.job.TaskAttempt;
public class TaskFailureWork extends AbstractTaskLifecycleWork {
- private final String details;
- private final List<Throwable> caughtExceptions;
+ private final List<Exception> exceptions;
public TaskFailureWork(ClusterControllerService ccs, JobId jobId, TaskAttemptId taId, String nodeId,
- String details, List<Throwable> caughtExceptions) {
+ List<Exception> exceptions) {
super(ccs, jobId, taId, nodeId);
- this.details = details;
- this.caughtExceptions = caughtExceptions;
+ this.exceptions = exceptions;
}
@Override
protected void performEvent(TaskAttempt ta) {
JobRun run = ccs.getActiveRunMap().get(jobId);
- ccs.getDatasetDirectoryService().reportJobFailure(jobId, caughtExceptions);
+ ccs.getDatasetDirectoryService().reportJobFailure(jobId, exceptions);
ActivityCluster ac = ta.getTask().getTaskCluster().getActivityCluster();
- run.getScheduler().notifyTaskFailure(ta, ac, details, caughtExceptions);
+ run.getScheduler().notifyTaskFailure(ta, ac, exceptions);
}
@Override
diff --git a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/WaitForJobCompletionWork.java b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/WaitForJobCompletionWork.java
index 6cfe025..8efea17 100644
--- a/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/WaitForJobCompletionWork.java
+++ b/hyracks/hyracks-control/hyracks-control-cc/src/main/java/edu/uci/ics/hyracks/control/cc/work/WaitForJobCompletionWork.java
@@ -14,6 +14,8 @@
*/
package edu.uci.ics.hyracks.control.cc.work;
+import java.util.List;
+
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
import edu.uci.ics.hyracks.control.cc.job.IJobStatusConditionVariable;
@@ -48,17 +50,31 @@
});
} else {
final IJobStatusConditionVariable cArchivedVar = ccs.getRunMapArchive().get(jobId);
- ccs.getExecutor().execute(new Runnable() {
- @Override
- public void run() {
- try {
- cArchivedVar.waitForCompletion();
- callback.setValue(null);
- } catch (Exception e) {
- callback.setException(e);
+ if (cArchivedVar != null) {
+ ccs.getExecutor().execute(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ cArchivedVar.waitForCompletion();
+ callback.setValue(null);
+ } catch (Exception e) {
+ callback.setException(e);
+ }
}
- }
- });
+ });
+ } else {
+ final List<Exception> exceptions = ccs.getRunHistory().get(jobId);
+ ccs.getExecutor().execute(new Runnable() {
+ @Override
+ public void run() {
+ if (exceptions != null && !exceptions.isEmpty()) {
+ /** only report the first exception because IResultCallback can deliver only one outcome */
+ callback.setException(exceptions.get(0));
+ } else {
+ callback.setValue(null);
+ }
+ }
+ });
+ }
}
}
}
\ No newline at end of file
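The fallback branch above consults the run history and surfaces only exceptions.get(0). A minimal sketch of why that is enough, assuming IResultCallback behaves like the one-shot holder below (OneShotResult is a made-up name):

import java.util.concurrent.CountDownLatch;

// Hypothetical one-shot result holder mirroring IResultCallback's contract: a single
// outcome, either a value or one exception, so at most one failure can be reported.
public class OneShotResult<T> {
    private final CountDownLatch done = new CountDownLatch(1);
    private T value;
    private Exception exception;

    public synchronized void setValue(T v) {
        value = v;
        done.countDown();
    }

    public synchronized void setException(Exception e) {
        exception = e;
        done.countDown();
    }

    /** blocks until one outcome is set, then returns the value or rethrows the exception */
    public T get() throws Exception {
        done.await();
        synchronized (this) {
            if (exception != null) {
                throw exception;
            }
            return value;
        }
    }
}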
diff --git a/hyracks/hyracks-control/hyracks-control-common/pom.xml b/hyracks/hyracks-control/hyracks-control-common/pom.xml
index 4ffb716..15e2840 100644
--- a/hyracks/hyracks-control/hyracks-control-common/pom.xml
+++ b/hyracks/hyracks-control/hyracks-control-common/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,7 +27,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/application/ApplicationContext.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/application/ApplicationContext.java
index 5251584..58ae79e 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/application/ApplicationContext.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/application/ApplicationContext.java
@@ -18,6 +18,8 @@
import java.io.Serializable;
import edu.uci.ics.hyracks.api.application.IApplicationContext;
+import edu.uci.ics.hyracks.api.job.IJobSerializerDeserializerContainer;
+import edu.uci.ics.hyracks.api.job.JobSerializerDeserializerContainer;
import edu.uci.ics.hyracks.api.messages.IMessageBroker;
import edu.uci.ics.hyracks.control.common.context.ServerContext;
@@ -25,6 +27,7 @@
protected ServerContext serverCtx;
protected Serializable distributedState;
protected IMessageBroker messageBroker;
+ protected IJobSerializerDeserializerContainer jobSerDeContainer = new JobSerializerDeserializerContainer();
public ApplicationContext(ServerContext serverCtx) throws IOException {
this.serverCtx = serverCtx;
@@ -44,4 +47,9 @@
public IMessageBroker getMessageBroker() {
return this.messageBroker;
}
+
+ @Override
+ public IJobSerializerDeserializerContainer getJobSerializerDeserializerContainer() {
+ return this.jobSerDeContainer;
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java
index 1a4dc73..9954dc3 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/IClusterController.java
@@ -19,8 +19,10 @@
import edu.uci.ics.hyracks.api.comm.NetworkAddress;
import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.control.common.controllers.NodeRegistration;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentStatus;
import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatData;
import edu.uci.ics.hyracks.control.common.job.PartitionDescriptor;
import edu.uci.ics.hyracks.control.common.job.PartitionRequest;
@@ -35,11 +37,13 @@
public void notifyTaskComplete(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics)
throws Exception;
- public void notifyTaskFailure(JobId jobId, TaskAttemptId taskId, String nodeId, String details,
- List<Throwable> caughtExceptions) throws Exception;
+ public void notifyTaskFailure(JobId jobId, TaskAttemptId taskId, String nodeId, List<Exception> exceptions)
+ throws Exception;
public void notifyJobletCleanup(JobId jobId, String nodeId) throws Exception;
+ public void notifyDeployBinary(DeploymentId deploymentId, String nodeId, DeploymentStatus status) throws Exception;
+
public void nodeHeartbeat(String id, HeartbeatData hbData) throws Exception;
public void reportProfile(String id, List<JobProfile> profiles) throws Exception;
@@ -48,7 +52,7 @@
public void registerPartitionRequest(PartitionRequest partitionRequest) throws Exception;
- public void sendApplicationMessageToCC(byte[] data, String nodeId) throws Exception;
+ public void sendApplicationMessageToCC(byte[] data, DeploymentId deploymentId, String nodeId) throws Exception;
public void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult, int partition,
int nPartitions, NetworkAddress networkAddress) throws Exception;
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/INodeController.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/INodeController.java
index c589c97..cf27740 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/INodeController.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/base/INodeController.java
@@ -14,6 +14,7 @@
*/
package edu.uci.ics.hyracks.control.common.base;
+import java.net.URL;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
@@ -22,6 +23,7 @@
import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.job.JobFlag;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobStatus;
@@ -29,7 +31,8 @@
import edu.uci.ics.hyracks.control.common.job.TaskAttemptDescriptor;
public interface INodeController {
- public void startTasks(JobId jobId, byte[] planBytes, List<TaskAttemptDescriptor> taskDescriptors,
+ public void startTasks(DeploymentId deploymentId, JobId jobId, byte[] planBytes,
+ List<TaskAttemptDescriptor> taskDescriptors,
Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies, EnumSet<JobFlag> flags) throws Exception;
public void abortTasks(JobId jobId, List<TaskAttemptId> tasks) throws Exception;
@@ -37,4 +40,8 @@
public void cleanUpJoblet(JobId jobId, JobStatus status) throws Exception;
public void reportPartitionAvailability(PartitionId pid, NetworkAddress networkAddress) throws Exception;
+
+ public void deployBinary(DeploymentId deploymentId, List<URL> url) throws Exception;
+
+ public void undeployBinary(DeploymentId deploymentId) throws Exception;
}
\ No newline at end of file
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NCConfig.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NCConfig.java
index ec29592..633d4fb 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NCConfig.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/controllers/NCConfig.java
@@ -16,86 +16,107 @@
import java.io.Serializable;
import java.util.List;
+import java.util.Map;
import org.kohsuke.args4j.Argument;
import org.kohsuke.args4j.Option;
import org.kohsuke.args4j.spi.StopOptionHandler;
public class NCConfig implements Serializable {
- private static final long serialVersionUID = 1L;
+ private static final long serialVersionUID = 1L;
- @Option(name = "-cc-host", usage = "Cluster Controller host name", required = true)
- public String ccHost;
+ @Option(name = "-cc-host", usage = "Cluster Controller host name", required = true)
+ public String ccHost;
- @Option(name = "-cc-port", usage = "Cluster Controller port (default: 1099)")
- public int ccPort = 1099;
+ @Option(name = "-cc-port", usage = "Cluster Controller port (default: 1099)")
+ public int ccPort = 1099;
- @Option(name = "-cluster-net-ip-address", usage = "IP Address to bind cluster listener", required = true)
- public String clusterNetIPAddress;
+ @Option(name = "-cluster-net-ip-address", usage = "IP Address to bind cluster listener", required = true)
+ public String clusterNetIPAddress;
- @Option(name = "-node-id", usage = "Logical name of node controller unique within the cluster", required = true)
- public String nodeId;
+ @Option(name = "-node-id", usage = "Logical name of node controller unique within the cluster", required = true)
+ public String nodeId;
- @Option(name = "-data-ip-address", usage = "IP Address to bind data listener", required = true)
- public String dataIPAddress;
+ @Option(name = "-data-ip-address", usage = "IP Address to bind data listener", required = true)
+ public String dataIPAddress;
- @Option(name = "-result-ip-address", usage = "IP Address to bind dataset result distribution listener", required = true)
- public String datasetIPAddress;
+ @Option(name = "-result-ip-address", usage = "IP Address to bind dataset result distribution listener", required = true)
+ public String datasetIPAddress;
- @Option(name = "-iodevices", usage = "Comma separated list of IO Device mount points (default: One device in default temp folder)", required = false)
- public String ioDevices = System.getProperty("java.io.tmpdir");
+ @Option(name = "-iodevices", usage = "Comma separated list of IO Device mount points (default: One device in default temp folder)", required = false)
+ public String ioDevices = System.getProperty("java.io.tmpdir");
- @Option(name = "-net-thread-count", usage = "Number of threads to use for Network I/O (default: 1)")
- public int nNetThreads = 1;
+ @Option(name = "-net-thread-count", usage = "Number of threads to use for Network I/O (default: 1)")
+ public int nNetThreads = 1;
- @Option(name = "-max-memory", usage = "Maximum memory usable at this Node Controller in bytes (default: -1 auto)")
- public int maxMemory = -1;
+ @Option(name = "-max-memory", usage = "Maximum memory usable at this Node Controller in bytes (default: -1 auto)")
+ public int maxMemory = -1;
- @Option(name = "-result-history-size", usage = "Limits the number of jobs whose results should be remembered by the system to the specified value. (default: 10)")
- public int resultHistorySize = 100;
+ @Option(name = "-result-history-size", usage = "Limits the number of jobs whose results should be remembered by the system to the specified value. (default: 10)")
+ public int resultHistorySize = 100;
- @Option(name = "-result-manager-memory", usage = "Memory usable for result caching at this Node Controller in bytes (default: -1 auto)")
- public int resultManagerMemory = -1;
+ @Option(name = "-result-manager-memory", usage = "Memory usable for result caching at this Node Controller in bytes (default: -1 auto)")
+ public int resultManagerMemory = -1;
- @Option(name = "-app-nc-main-class", usage = "Application NC Main Class")
- public String appNCMainClass;
+ @Option(name = "-app-nc-main-class", usage = "Application NC Main Class")
+ public String appNCMainClass;
- @Argument
- @Option(name = "--", handler = StopOptionHandler.class)
- public List<String> appArgs;
+ @Argument
+ @Option(name = "--", handler = StopOptionHandler.class)
+ public List<String> appArgs;
- public void toCommandLine(List<String> cList) {
- cList.add("-cc-host");
- cList.add(ccHost);
- cList.add("-cc-port");
- cList.add(String.valueOf(ccPort));
- cList.add("-cluster-net-ip-address");
- cList.add(clusterNetIPAddress);
- cList.add("-node-id");
- cList.add(nodeId);
- cList.add("-data-ip-address");
- cList.add(dataIPAddress);
- cList.add(datasetIPAddress);
- cList.add("-iodevices");
- cList.add(ioDevices);
- cList.add("-net-thread-count");
- cList.add(String.valueOf(nNetThreads));
- cList.add("-max-memory");
- cList.add(String.valueOf(maxMemory));
- cList.add("-result-history-size");
- cList.add(String.valueOf(resultHistorySize));
- cList.add("-result-manager-memory");
- cList.add(String.valueOf(resultManagerMemory));
+ public void toCommandLine(List<String> cList) {
+ cList.add("-cc-host");
+ cList.add(ccHost);
+ cList.add("-cc-port");
+ cList.add(String.valueOf(ccPort));
+ cList.add("-cluster-net-ip-address");
+ cList.add(clusterNetIPAddress);
+ cList.add("-node-id");
+ cList.add(nodeId);
+ cList.add("-data-ip-address");
+ cList.add(dataIPAddress);
+ cList.add(datasetIPAddress);
+ cList.add("-iodevices");
+ cList.add(ioDevices);
+ cList.add("-net-thread-count");
+ cList.add(String.valueOf(nNetThreads));
+ cList.add("-max-memory");
+ cList.add(String.valueOf(maxMemory));
+ cList.add("-result-history-size");
+ cList.add(String.valueOf(resultHistorySize));
+ cList.add("-result-manager-memory");
+ cList.add(String.valueOf(resultManagerMemory));
- if (appNCMainClass != null) {
- cList.add("-app-nc-main-class");
- cList.add(appNCMainClass);
- }
- if (appArgs != null && !appArgs.isEmpty()) {
- cList.add("--");
- for (String appArg : appArgs) {
- cList.add(appArg);
- }
- }
- }
-}
+ if (appNCMainClass != null) {
+ cList.add("-app-nc-main-class");
+ cList.add(appNCMainClass);
+ }
+ if (appArgs != null && !appArgs.isEmpty()) {
+ cList.add("--");
+ for (String appArg : appArgs) {
+ cList.add(appArg);
+ }
+ }
+ }
+
+ public void toMap(Map<String, String> configuration) {
+ configuration.put("cc-host", ccHost);
+ configuration.put("cc-port", (String.valueOf(ccPort)));
+ configuration.put("cluster-net-ip-address", clusterNetIPAddress);
+ configuration.put("node-id", nodeId);
+ configuration.put("data-ip-address", dataIPAddress);
+ configuration.put("iodevices", ioDevices);
+ configuration.put("net-thread-count", String.valueOf(nNetThreads));
+ configuration.put("max-memory", String.valueOf(maxMemory));
+ configuration.put("result-history-size",
+ String.valueOf(resultHistorySize));
+ configuration.put("result-manager-memory",
+ String.valueOf(resultManagerMemory));
+
+ if (appNCMainClass != null) {
+ configuration.put("app-nc-main-class", appNCMainClass);
+ }
+
+ }
+}
\ No newline at end of file
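A small usage sketch for the new toMap: populate an NCConfig as the command line would, then export it as string pairs. All values below are made up:

import java.util.HashMap;
import java.util.Map;

import edu.uci.ics.hyracks.control.common.controllers.NCConfig;

// Usage sketch: toMap mirrors the command-line options as string key/value pairs
// so the CC can hand them to application hooks such as notifyNodeJoin.
public class NCConfigToMapDemo {
    public static void main(String[] args) {
        NCConfig config = new NCConfig();
        config.ccHost = "cc.example.com";      // illustrative values
        config.nodeId = "nc1";
        config.clusterNetIPAddress = "10.0.0.5";
        config.dataIPAddress = "10.0.0.5";
        Map<String, String> map = new HashMap<String, String>();
        config.toMap(map);
        System.out.println(map.get("node-id")); // prints nc1
    }
}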
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/ClassLoaderJobSerializerDeserializer.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/ClassLoaderJobSerializerDeserializer.java
new file mode 100644
index 0000000..3a35c25
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/ClassLoaderJobSerializerDeserializer.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.control.common.deployment;
+
+import java.io.Serializable;
+import java.lang.reflect.Method;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.IJobSerializerDeserializer;
+import edu.uci.ics.hyracks.api.util.JavaSerializationUtils;
+
+/**
+ * This is the IJobSerializerDeserializer implementation for jobs with dynamically deployed jars.
+ *
+ * @author yingyib
+ *
+ */
+public class ClassLoaderJobSerializerDeserializer implements IJobSerializerDeserializer {
+
+ private URLClassLoader classLoader;
+
+ @Override
+ public Object deserialize(byte[] jsBytes) throws HyracksException {
+ try {
+ if (classLoader == null) {
+ return JavaSerializationUtils.deserialize(jsBytes);
+ }
+ return JavaSerializationUtils.deserialize(jsBytes, classLoader);
+ } catch (Exception e) {
+ throw new HyracksException(e);
+ }
+ }
+
+ @Override
+ public byte[] serialize(Serializable jobSpec) throws HyracksException {
+ try {
+ if (classLoader == null) {
+ return JavaSerializationUtils.serialize(jobSpec);
+ }
+ return JavaSerializationUtils.serialize(jobSpec, classLoader);
+ } catch (Exception e) {
+ throw new HyracksException(e);
+ }
+ }
+
+ @Override
+ public void addClassPathURLs(List<URL> binaryURLs) throws HyracksException {
+ Collections.sort(binaryURLs, new Comparator<URL>() {
+ @Override
+ public int compare(URL o1, URL o2) {
+ return o1.getFile().compareTo(o2.getFile());
+ }
+ });
+ try {
+ if (classLoader == null) {
+ /** create a new classloader */
+ URL[] urls = binaryURLs.toArray(new URL[binaryURLs.size()]);
+ classLoader = new URLClassLoader(urls, this.getClass().getClassLoader());
+ } else {
+ /** add URLs to the existing classloader */
+ Method method = classLoader.getClass().getDeclaredMethod("addURL", new Class[] { URL.class });
+ method.setAccessible(true);
+ /** addURL takes a single URL, so invoke it once per jar; passing the whole array would be unpacked as multiple arguments and fail */
+ for (URL url : binaryURLs) {
+ method.invoke(classLoader, url);
+ }
+ }
+ } catch (Exception e) {
+ throw new HyracksException(e);
+ }
+ }
+
+ @Override
+ public Class<?> loadClass(String className) throws HyracksException {
+ try {
+ return classLoader.loadClass(className);
+ } catch (Exception e) {
+ throw new HyracksException(e);
+ }
+ }
+
+ @Override
+ public ClassLoader getClassLoader() throws HyracksException {
+ return classLoader;
+ }
+}
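The addClassPathURLs above relies on the classic reflective addURL trick, since URLClassLoader.addURL is protected. A self-contained sketch of just that trick, valid on the Java 6/7 era JDKs this code targets:

import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;

// Standalone sketch of the reflective addURL trick: open up the protected method
// to grow an existing classloader's classpath, one URL per invocation.
public class AddUrlSketch {
    public static void addJar(URLClassLoader loader, URL jarUrl) throws Exception {
        Method addURL = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
        addURL.setAccessible(true);
        addURL.invoke(loader, jarUrl);
    }
}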
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/DeploymentRun.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/DeploymentRun.java
new file mode 100644
index 0000000..84cfbb1
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/DeploymentRun.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.control.common.deployment;
+
+import java.util.Set;
+import java.util.TreeSet;
+
+/**
+ * This class maintains the status of a deployment process and the states
+ * of all the node controllers involved in the deployment.
+ *
+ * @author yingyib
+ */
+public class DeploymentRun implements IDeploymentStatusConditionVariable {
+
+ private DeploymentStatus deploymentStatus = DeploymentStatus.FAIL;
+ private final Set<String> deploymentNodeIds = new TreeSet<String>();
+
+ public DeploymentRun(Set<String> nodeIds) {
+ deploymentNodeIds.addAll(nodeIds);
+ }
+
+ /**
+ * Notify the deployment status of one node controller
+ *
+ * @param nodeId
+ * @param status
+ */
+ public synchronized void notifyDeploymentStatus(String nodeId, DeploymentStatus status) {
+ if (status == DeploymentStatus.SUCCEED) {
+ deploymentNodeIds.remove(nodeId);
+ if (deploymentNodeIds.size() == 0) {
+ deploymentStatus = DeploymentStatus.SUCCEED;
+ notifyAll();
+ }
+ } else {
+ deploymentNodeIds.clear();
+ deploymentStatus = DeploymentStatus.FAIL;
+ notifyAll();
+ }
+ }
+
+ @Override
+ public synchronized DeploymentStatus waitForCompletion() throws Exception {
+ /** loop to tolerate spurious wake-ups and notifications that arrive before this wait */
+ while (!deploymentNodeIds.isEmpty()) {
+ wait();
+ }
+ return deploymentStatus;
+ }
+
+}
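A tiny hypothetical driver for DeploymentRun: a reporter thread marks each node SUCCEED while the caller blocks in waitForCompletion until the node set drains. The loop added to waitForCompletion above is what keeps this race-free even if the reporter finishes before the waiter starts waiting:

import java.util.Set;
import java.util.TreeSet;

// Demo sketch; assumes DeploymentRun and DeploymentStatus above are on the classpath.
public class DeploymentRunDemo {
    public static void main(String[] args) throws Exception {
        Set<String> nodes = new TreeSet<String>();
        nodes.add("nc1");
        nodes.add("nc2");
        final DeploymentRun run = new DeploymentRun(nodes);
        new Thread(new Runnable() {
            public void run() {
                run.notifyDeploymentStatus("nc1", DeploymentStatus.SUCCEED);
                run.notifyDeploymentStatus("nc2", DeploymentStatus.SUCCEED);
            }
        }).start();
        System.out.println(run.waitForCompletion()); // prints SUCCEED
    }
}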
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionReader.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/DeploymentStatus.java
similarity index 68%
copy from hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionReader.java
copy to hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/DeploymentStatus.java
index 8f5ed64..4ba50b7 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionReader.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/DeploymentStatus.java
@@ -1,21 +1,21 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.hyracks.api.dataset;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+package edu.uci.ics.hyracks.control.common.deployment;
-public interface IDatasetPartitionReader {
- public void writeTo(IFrameWriter writer);
+public enum DeploymentStatus {
+ SUCCEED,
+ FAIL
}
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/DeploymentUtils.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/DeploymentUtils.java
new file mode 100644
index 0000000..0677e2e
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/DeploymentUtils.java
@@ -0,0 +1,216 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.control.common.deployment;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.impl.client.DefaultHttpClient;
+
+import edu.uci.ics.hyracks.api.application.IApplicationContext;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.job.IJobSerializerDeserializer;
+import edu.uci.ics.hyracks.api.job.IJobSerializerDeserializerContainer;
+import edu.uci.ics.hyracks.api.util.JavaSerializationUtils;
+import edu.uci.ics.hyracks.control.common.context.ServerContext;
+
+/**
+ * A utility class which is in charge of the actual work of deployments.
+ *
+ * @author yingyib
+ */
+public class DeploymentUtils {
+
+ private static final String DEPLOYMENT = "applications";
+
+ /**
+ * undeploy an existing deployment
+ *
+ * @param deploymentId
+ * the deployment id
+ * @param container
+ * @param ctx
+ * @throws HyracksException
+ */
+ public static void undeploy(DeploymentId deploymentId, IJobSerializerDeserializerContainer container,
+ ServerContext ctx) throws HyracksException {
+ container.removeJobSerializerDeserializer(deploymentId);
+ String rootDir = ctx.getBaseDir().toString();
+ String deploymentDir = rootDir.endsWith(File.separator) ? rootDir + DEPLOYMENT + File.separator + deploymentId
+ : rootDir + File.separator + DEPLOYMENT + File.separator + deploymentId;
+ try {
+ File dFile = new File(deploymentDir);
+ if (dFile.exists()) {
+ FileUtils.forceDelete(dFile);
+ }
+ } catch (Exception e) {
+ throw new HyracksException(e);
+ }
+ }
+
+ /**
+ * Deploy jars at an NC or the CC
+ *
+ * @param deploymentId
+ * the deployment id
+ * @param urls
+ * the jar URLs
+ * @param container
+ * the container of the serializer/deserializer
+ * @param ctx
+ * the ServerContext
+ * @param isNC
+ * true for NC, false for CC
+ * @throws HyracksException
+ */
+ public static void deploy(DeploymentId deploymentId, List<URL> urls, IJobSerializerDeserializerContainer container,
+ ServerContext ctx, boolean isNC) throws HyracksException {
+ IJobSerializerDeserializer jobSerDe = container.getJobSerializerDeerializer(deploymentId);
+ if (jobSerDe == null) {
+ jobSerDe = new ClassLoaderJobSerializerDeserializer();
+ container.addJobSerializerDeserializer(deploymentId, jobSerDe);
+ }
+ String rootDir = ctx.getBaseDir().toString();
+ String deploymentDir = rootDir.endsWith(File.separator) ? rootDir + DEPLOYMENT + File.separator + deploymentId
+ : rootDir + File.separator + DEPLOYMENT + File.separator + deploymentId;
+ jobSerDe.addClassPathURLs(downloadURLs(urls, deploymentDir, isNC));
+ }
+
+ /**
+ * Deserialize bytes to an object according to a specific deployment
+ *
+ * @param bytes
+ * the bytes to be deserialized
+ * @param deploymentId
+ * the deployment id
+ * @param appCtx
+ * @return the deserialized object
+ * @throws HyracksException
+ */
+ public static Object deserialize(byte[] bytes, DeploymentId deploymentId, IApplicationContext appCtx)
+ throws HyracksException {
+ try {
+ IJobSerializerDeserializerContainer jobSerDeContainer = appCtx.getJobSerializerDeserializerContainer();
+ IJobSerializerDeserializer jobSerDe = deploymentId == null ? null : jobSerDeContainer
+ .getJobSerializerDeerializer(deploymentId);
+ Object obj = jobSerDe == null ? JavaSerializationUtils.deserialize(bytes) : jobSerDe.deserialize(bytes);
+ return obj;
+ } catch (Exception e) {
+ throw new HyracksException(e);
+ }
+ }
+
+ /**
+ * Load a class from its class name
+ *
+ * @param className
+ * @param deploymentId
+ * @param appCtx
+ * @return the loaded class
+ * @throws HyracksException
+ */
+ public static Class<?> loadClass(String className, DeploymentId deploymentId, IApplicationContext appCtx)
+ throws HyracksException {
+ try {
+ IJobSerializerDeserializerContainer jobSerDeContainer = appCtx.getJobSerializerDeserializerContainer();
+ IJobSerializerDeserializer jobSerDe = deploymentId == null ? null : jobSerDeContainer
+ .getJobSerializerDeerializer(deploymentId);
+ Class<?> cl = jobSerDe == null ? JavaSerializationUtils.loadClass(className) : jobSerDe
+ .loadClass(className);
+ return cl;
+ } catch (Exception e) {
+ throw new HyracksException(e);
+ }
+ }
+
+ /**
+ * Get the classloader of a specific deployment
+ *
+ * @param deploymentId
+ * @param appCtx
+ * @return the classloader of the deployment
+ * @throws HyracksException
+ */
+ public static ClassLoader getClassLoader(DeploymentId deploymentId, IApplicationContext appCtx)
+ throws HyracksException {
+ try {
+ IJobSerializerDeserializerContainer jobSerDeContainer = appCtx.getJobSerializerDeserializerContainer();
+ IJobSerializerDeserializer jobSerDe = deploymentId == null ? null : jobSerDeContainer
+ .getJobSerializerDeerializer(deploymentId);
+ ClassLoader cl = jobSerDe == null ? DeploymentUtils.class.getClassLoader() : jobSerDe.getClassLoader();
+ return cl;
+ } catch (Exception e) {
+ throw new HyracksException(e);
+ }
+ }
+
+ /**
+ * Download remote HTTP URLs and return the locally stored file URLs
+ *
+ * @param urls
+ * the remote Http URLs
+ * @param deploymentDir
+ * the deployment jar storage directory
+ * @param isNC
+ * true for NC, false for CC
+ * @return a list of local file URLs
+ * @throws HyracksException
+ */
+ private static List<URL> downloadURLs(List<URL> urls, String deploymentDir, boolean isNC) throws HyracksException {
+ try {
+ List<URL> downloadedFileURLs = new ArrayList<URL>();
+ File dir = new File(deploymentDir);
+ if (!dir.exists()) {
+ FileUtils.forceMkdir(dir);
+ }
+ for (URL url : urls) {
+ String urlString = url.toString();
+ int slashIndex = urlString.lastIndexOf('/');
+ String fileName = urlString.substring(slashIndex + 1).split("&")[1];
+ String filePath = deploymentDir + File.separator + fileName;
+ File targetFile = new File(filePath);
+ if (isNC) {
+ HttpClient hc = new DefaultHttpClient();
+ HttpGet get = new HttpGet(url.toString());
+ HttpResponse response = hc.execute(get);
+ InputStream is = response.getEntity().getContent();
+ OutputStream os = new FileOutputStream(targetFile);
+ try {
+ IOUtils.copyLarge(is, os);
+ } finally {
+ os.close();
+ is.close();
+ }
+ }
+ downloadedFileURLs.add(targetFile.toURI().toURL());
+ }
+ return downloadedFileURLs;
+ } catch (Exception e) {
+ throw new HyracksException(e);
+ }
+ }
+}
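For reference, the deployment directory computed twice above reduces to the server base dir plus "applications" plus the deployment id. A sketch with illustrative values:

import java.io.File;

// Path sketch mirroring the deploymentDir expression in deploy/undeploy above;
// the base dir and id here are made up.
public class DeploymentDirSketch {
    public static String deploymentDir(String baseDir, String deploymentId) {
        String root = baseDir.endsWith(File.separator) ? baseDir : baseDir + File.separator;
        return root + "applications" + File.separator + deploymentId;
    }

    public static void main(String[] args) {
        System.out.println(deploymentDir("/tmp/hyracks", "job-1234"));
        // /tmp/hyracks/applications/job-1234 (on Unix)
    }
}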
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/IDeploymentStatusConditionVariable.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/IDeploymentStatusConditionVariable.java
new file mode 100644
index 0000000..771baaa
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/deployment/IDeploymentStatusConditionVariable.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.control.common.deployment;
+
+/**
+ * The condition variable interface for deployments
+ *
+ * @author yingyib
+ */
+public interface IDeploymentStatusConditionVariable {
+
+ /**
+ * Synchronously wait for the deployment to complete
+ *
+ * @return the deployment status
+ * @throws Exception
+ */
+ public DeploymentStatus waitForCompletion() throws Exception;
+}
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java
index 3755bd2..a6382de 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/CCNCFunctions.java
@@ -21,6 +21,7 @@
import java.io.IOException;
import java.io.OutputStream;
import java.io.Serializable;
+import java.net.URL;
import java.nio.ByteBuffer;
import java.util.EnumSet;
import java.util.List;
@@ -37,12 +38,14 @@
import edu.uci.ics.hyracks.api.dataflow.TaskId;
import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.job.JobFlag;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobStatus;
import edu.uci.ics.hyracks.api.partitions.PartitionId;
import edu.uci.ics.hyracks.control.common.controllers.NodeParameters;
import edu.uci.ics.hyracks.control.common.controllers.NodeRegistration;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentStatus;
import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatData;
import edu.uci.ics.hyracks.control.common.job.PartitionDescriptor;
import edu.uci.ics.hyracks.control.common.job.PartitionRequest;
@@ -81,14 +84,23 @@
GET_NODE_CONTROLLERS_INFO,
GET_NODE_CONTROLLERS_INFO_RESPONSE,
+ DEPLOY_BINARY,
+ NOTIFY_DEPLOY_BINARY,
+ UNDEPLOY_BINARY,
+
OTHER
}
public static class SendApplicationMessageFunction extends Function {
private static final long serialVersionUID = 1L;
private byte[] serializedMessage;
+ private DeploymentId deploymentId;
private String nodeId;
+ public DeploymentId getDeploymentId() {
+ return deploymentId;
+ }
+
public String getNodeId() {
return nodeId;
}
@@ -101,8 +113,9 @@
return serializedMessage;
}
- public SendApplicationMessageFunction(byte[] data, String nodeId) {
+ public SendApplicationMessageFunction(byte[] data, DeploymentId deploymentId, String nodeId) {
this.serializedMessage = data;
+ this.deploymentId = deploymentId;
this.nodeId = nodeId;
}
@@ -200,16 +213,13 @@
private final JobId jobId;
private final TaskAttemptId taskId;
private final String nodeId;
- private final String details;
- private final List<Throwable> caughtExceptions;
+ private final List<Exception> exceptions;
- public NotifyTaskFailureFunction(JobId jobId, TaskAttemptId taskId, String nodeId, String details,
- List<Throwable> caughtExceptions) {
+ public NotifyTaskFailureFunction(JobId jobId, TaskAttemptId taskId, String nodeId, List<Exception> exceptions) {
this.jobId = jobId;
this.taskId = taskId;
this.nodeId = nodeId;
- this.details = details;
- this.caughtExceptions = caughtExceptions;
+ this.exceptions = exceptions;
}
@Override
@@ -229,12 +239,8 @@
return nodeId;
}
- public String getDetails() {
- return details;
- }
-
- public List<Throwable> getCaughtExceptions() {
- return caughtExceptions;
+ public List<Exception> getExceptions() {
+ return exceptions;
}
}
@@ -588,14 +594,17 @@
public static class StartTasksFunction extends Function {
private static final long serialVersionUID = 1L;
+ private final DeploymentId deploymentId;
private final JobId jobId;
private final byte[] planBytes;
private final List<TaskAttemptDescriptor> taskDescriptors;
private final Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies;
private final EnumSet<JobFlag> flags;
- public StartTasksFunction(JobId jobId, byte[] planBytes, List<TaskAttemptDescriptor> taskDescriptors,
+ public StartTasksFunction(DeploymentId deploymentId, JobId jobId, byte[] planBytes,
+ List<TaskAttemptDescriptor> taskDescriptors,
Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies, EnumSet<JobFlag> flags) {
+ this.deploymentId = deploymentId;
this.jobId = jobId;
this.planBytes = planBytes;
this.taskDescriptors = taskDescriptors;
@@ -608,6 +617,10 @@
return FunctionId.START_TASKS;
}
+ public DeploymentId getDeploymentId() {
+ return deploymentId;
+ }
+
public JobId getJobId() {
return jobId;
}
@@ -757,6 +770,81 @@
}
}
+ public static class DeployBinaryFunction extends Function {
+ private static final long serialVersionUID = 1L;
+
+ private final List<URL> binaryURLs;
+ private final DeploymentId deploymentId;
+
+ public DeployBinaryFunction(DeploymentId deploymentId, List<URL> binaryURLs) {
+ this.binaryURLs = binaryURLs;
+ this.deploymentId = deploymentId;
+ }
+
+ @Override
+ public FunctionId getFunctionId() {
+ return FunctionId.DEPLOY_BINARY;
+ }
+
+ public List<URL> getBinaryURLs() {
+ return binaryURLs;
+ }
+
+ public DeploymentId getDeploymentId() {
+ return deploymentId;
+ }
+ }
+
+ public static class UnDeployBinaryFunction extends Function {
+ private static final long serialVersionUID = 1L;
+
+ private final DeploymentId deploymentId;
+
+ public UnDeployBinaryFunction(DeploymentId deploymentId) {
+ this.deploymentId = deploymentId;
+ }
+
+ @Override
+ public FunctionId getFunctionId() {
+ return FunctionId.UNDEPLOY_BINARY;
+ }
+
+ public DeploymentId getDeploymentId() {
+ return deploymentId;
+ }
+ }
+
+ public static class NotifyDeployBinaryFunction extends Function {
+ private static final long serialVersionUID = 1L;
+
+ private final String nodeId;
+ private final DeploymentId deploymentId;
+ private final DeploymentStatus deploymentStatus;
+
+ public NotifyDeployBinaryFunction(DeploymentId deploymentId, String nodeId, DeploymentStatus deploymentStatus) {
+ this.nodeId = nodeId;
+ this.deploymentId = deploymentId;
+ this.deploymentStatus = deploymentStatus;
+ }
+
+ @Override
+ public FunctionId getFunctionId() {
+ return FunctionId.NOTIFY_DEPLOY_BINARY;
+ }
+
+ public String getNodeId() {
+ return nodeId;
+ }
+
+ public DeploymentId getDeploymentId() {
+ return deploymentId;
+ }
+
+ public DeploymentStatus getDeploymentStatus() {
+ return deploymentStatus;
+ }
+ }
+
public static class SerializerDeserializer implements IPayloadSerializerDeserializer {
private final JavaSerializationBasedPayloadSerializerDeserializer javaSerde;
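The three new functions follow CCNCFunctions' uniform message pattern: a serializable value object tagged with a FunctionId. A self-contained sketch of that pattern with a made-up PING message:

import java.io.Serializable;

// Pattern sketch only; the real CCNCFunctions nests these inside one outer class and
// dispatches on FunctionId. PING is hypothetical.
public class FunctionPatternSketch {
    enum FunctionId {
        PING,
        OTHER
    }

    static abstract class Function implements Serializable {
        private static final long serialVersionUID = 1L;

        public abstract FunctionId getFunctionId();
    }

    static class PingFunction extends Function {
        private static final long serialVersionUID = 1L;

        private final String nodeId;

        PingFunction(String nodeId) {
            this.nodeId = nodeId;
        }

        @Override
        public FunctionId getFunctionId() {
            return FunctionId.PING;
        }

        public String getNodeId() {
            return nodeId;
        }
    }
}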
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
index 8485152..fec35ac 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
@@ -19,9 +19,11 @@
import edu.uci.ics.hyracks.api.comm.NetworkAddress;
import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.control.common.base.IClusterController;
import edu.uci.ics.hyracks.control.common.controllers.NodeRegistration;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentStatus;
import edu.uci.ics.hyracks.control.common.heartbeat.HeartbeatData;
import edu.uci.ics.hyracks.control.common.job.PartitionDescriptor;
import edu.uci.ics.hyracks.control.common.job.PartitionRequest;
@@ -57,10 +59,10 @@
}
@Override
- public void notifyTaskFailure(JobId jobId, TaskAttemptId taskId, String nodeId, String details,
- List<Throwable> caughtExceptions) throws Exception {
+ public void notifyTaskFailure(JobId jobId, TaskAttemptId taskId, String nodeId, List<Exception> exceptions)
+ throws Exception {
CCNCFunctions.NotifyTaskFailureFunction fn = new CCNCFunctions.NotifyTaskFailureFunction(jobId, taskId, nodeId,
- details, caughtExceptions);
+ exceptions);
ipcHandle.send(-1, fn, null);
}
@@ -71,6 +73,13 @@
}
@Override
+ public void notifyDeployBinary(DeploymentId deploymentId, String nodeId, DeploymentStatus status) throws Exception {
+ CCNCFunctions.NotifyDeployBinaryFunction fn = new CCNCFunctions.NotifyDeployBinaryFunction(deploymentId,
+ nodeId, status);
+ ipcHandle.send(-1, fn, null);
+ }
+
+ @Override
public void nodeHeartbeat(String id, HeartbeatData hbData) throws Exception {
CCNCFunctions.NodeHeartbeatFunction fn = new CCNCFunctions.NodeHeartbeatFunction(id, hbData);
ipcHandle.send(-1, fn, null);
@@ -97,8 +106,9 @@
}
@Override
- public void sendApplicationMessageToCC(byte[] data, String nodeId) throws Exception {
- CCNCFunctions.SendApplicationMessageFunction fn = new CCNCFunctions.SendApplicationMessageFunction(data, nodeId);
+ public void sendApplicationMessageToCC(byte[] data, DeploymentId deploymentId, String nodeId) throws Exception {
+ CCNCFunctions.SendApplicationMessageFunction fn = new CCNCFunctions.SendApplicationMessageFunction(data,
+ deploymentId, nodeId);
ipcHandle.send(-1, fn, null);
}
@@ -127,4 +137,5 @@
public void getNodeControllerInfos() throws Exception {
ipcHandle.send(-1, new CCNCFunctions.GetNodeControllersInfoFunction(), null);
}
+
}
\ No newline at end of file
diff --git a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/NodeControllerRemoteProxy.java b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
index e4355aa..8346ecb 100644
--- a/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
+++ b/hyracks/hyracks-control/hyracks-control-common/src/main/java/edu/uci/ics/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
@@ -14,6 +14,7 @@
*/
package edu.uci.ics.hyracks.control.common.ipc;
+import java.net.URL;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
@@ -22,6 +23,7 @@
import edu.uci.ics.hyracks.api.dataflow.ConnectorDescriptorId;
import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.job.JobFlag;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.job.JobStatus;
@@ -38,10 +40,11 @@
}
@Override
- public void startTasks(JobId jobId, byte[] planBytes, List<TaskAttemptDescriptor> taskDescriptors,
+ public void startTasks(DeploymentId deploymentId, JobId jobId, byte[] planBytes,
+ List<TaskAttemptDescriptor> taskDescriptors,
Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies, EnumSet<JobFlag> flags) throws Exception {
- CCNCFunctions.StartTasksFunction stf = new CCNCFunctions.StartTasksFunction(jobId, planBytes, taskDescriptors,
- connectorPolicies, flags);
+ CCNCFunctions.StartTasksFunction stf = new CCNCFunctions.StartTasksFunction(deploymentId, jobId, planBytes,
+ taskDescriptors, connectorPolicies, flags);
ipcHandle.send(-1, stf, null);
}
@@ -63,4 +66,16 @@
pid, networkAddress);
ipcHandle.send(-1, rpaf, null);
}
+
+ @Override
+ public void deployBinary(DeploymentId deploymentId, List<URL> binaryURLs) throws Exception {
+ CCNCFunctions.DeployBinaryFunction rpaf = new CCNCFunctions.DeployBinaryFunction(deploymentId, binaryURLs);
+ ipcHandle.send(-1, rpaf, null);
+ }
+
+ @Override
+ public void undeployBinary(DeploymentId deploymentId) throws Exception {
+ CCNCFunctions.UnDeployBinaryFunction rpaf = new CCNCFunctions.UnDeployBinaryFunction(deploymentId);
+ ipcHandle.send(-1, rpaf, null);
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-control/hyracks-control-nc/pom.xml b/hyracks/hyracks-control/hyracks-control-nc/pom.xml
index ad83d86..f554523 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/pom.xml
+++ b/hyracks/hyracks-control/hyracks-control-nc/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -26,19 +26,19 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-net</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-comm</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
<reporting>
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Joblet.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Joblet.java
index 3855b4d..c6a03dc 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Joblet.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Joblet.java
@@ -27,6 +27,7 @@
import edu.uci.ics.hyracks.api.context.IHyracksJobletContext;
import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
import edu.uci.ics.hyracks.api.dataflow.state.IStateObject;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.io.FileReference;
@@ -43,6 +44,7 @@
import edu.uci.ics.hyracks.api.job.profiling.counters.ICounterContext;
import edu.uci.ics.hyracks.api.partitions.PartitionId;
import edu.uci.ics.hyracks.api.resources.IDeallocatable;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentUtils;
import edu.uci.ics.hyracks.control.common.job.PartitionRequest;
import edu.uci.ics.hyracks.control.common.job.PartitionState;
import edu.uci.ics.hyracks.control.common.job.profiling.counters.Counter;
@@ -58,6 +60,8 @@
private final INCApplicationContext appCtx;
+ private final DeploymentId deploymentId;
+
private final JobId jobId;
private final ActivityClusterGraph acg;
@@ -86,10 +90,11 @@
private boolean cleanupPending;
- public Joblet(NodeControllerService nodeController, JobId jobId, INCApplicationContext appCtx,
- ActivityClusterGraph acg) {
+ public Joblet(NodeControllerService nodeController, DeploymentId deploymentId, JobId jobId,
+ INCApplicationContext appCtx, ActivityClusterGraph acg) {
this.nodeController = nodeController;
this.appCtx = appCtx;
+ this.deploymentId = deploymentId;
this.jobId = jobId;
this.frameSize = acg.getFrameSize();
this.acg = acg;
@@ -283,4 +288,22 @@
e.printStackTrace();
}
}
+
+ @Override
+ public Class<?> loadClass(String className) {
+ try {
+ return DeploymentUtils.loadClass(className, deploymentId, appCtx);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public ClassLoader getClassLoader() {
+ try {
+ return DeploymentUtils.getClassLoader(deploymentId, appCtx);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
}
\ No newline at end of file
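The new loadClass/getClassLoader overrides route class resolution through DeploymentUtils, so operators in a deployed job resolve user classes against that deployment's jars rather than the NC's system class path. A minimal sketch of the underlying idea using only JDK APIs; the registry class and its method names are hypothetical, not the actual DeploymentUtils implementation.

import java.net.URL;
import java.net.URLClassLoader;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative per-deployment class-loader registry in the spirit of
// DeploymentUtils.loadClass/getClassLoader; names are hypothetical.
public class DeploymentLoaderRegistry {
    private final Map<String, URLClassLoader> loaders = new ConcurrentHashMap<>();

    public void register(String deploymentId, URL[] binaryURLs) {
        // Parent-first delegation is the URLClassLoader default; child-first
        // would be a different design choice.
        loaders.put(deploymentId, new URLClassLoader(binaryURLs, getClass().getClassLoader()));
    }

    public Class<?> loadClass(String deploymentId, String className) throws ClassNotFoundException {
        URLClassLoader loader = loaders.get(deploymentId);
        // Fall back to the system loader when the job was deployed without jars.
        return Class.forName(className, true, loader != null ? loader : getClass().getClassLoader());
    }
}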
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java
index e15c60e..a1c3b08 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/NodeControllerService.java
@@ -47,6 +47,7 @@
import edu.uci.ics.hyracks.api.client.NodeControllerInfo;
import edu.uci.ics.hyracks.api.context.IHyracksRootContext;
import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.io.IODeviceHandle;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.control.common.AbstractRemoteService;
@@ -73,8 +74,10 @@
import edu.uci.ics.hyracks.control.nc.work.ApplicationMessageWork;
import edu.uci.ics.hyracks.control.nc.work.BuildJobProfilesWork;
import edu.uci.ics.hyracks.control.nc.work.CleanupJobletWork;
+import edu.uci.ics.hyracks.control.nc.work.DeployBinaryWork;
import edu.uci.ics.hyracks.control.nc.work.ReportPartitionAvailabilityWork;
import edu.uci.ics.hyracks.control.nc.work.StartTasksWork;
+import edu.uci.ics.hyracks.control.nc.work.UnDeployBinaryWork;
import edu.uci.ics.hyracks.ipc.api.IIPCHandle;
import edu.uci.ics.hyracks.ipc.api.IIPCI;
import edu.uci.ics.hyracks.ipc.api.IPCPerformanceCounters;
@@ -444,13 +447,14 @@
case SEND_APPLICATION_MESSAGE: {
CCNCFunctions.SendApplicationMessageFunction amf = (CCNCFunctions.SendApplicationMessageFunction) fn;
queue.schedule(new ApplicationMessageWork(NodeControllerService.this, amf.getMessage(), amf
- .getNodeId()));
+ .getDeploymentId(), amf.getNodeId()));
return;
}
case START_TASKS: {
CCNCFunctions.StartTasksFunction stf = (CCNCFunctions.StartTasksFunction) fn;
- queue.schedule(new StartTasksWork(NodeControllerService.this, stf.getJobId(), stf.getPlanBytes(),
- stf.getTaskDescriptors(), stf.getConnectorPolicies(), stf.getFlags()));
+ queue.schedule(new StartTasksWork(NodeControllerService.this, stf.getDeploymentId(),
+ stf.getJobId(), stf.getPlanBytes(), stf.getTaskDescriptors(), stf.getConnectorPolicies(),
+ stf.getFlags()));
return;
}
@@ -484,14 +488,27 @@
setNodeControllersInfo(gncirf.getNodeControllerInfos());
return;
}
+
+ case DEPLOY_BINARY: {
+ CCNCFunctions.DeployBinaryFunction ndbf = (CCNCFunctions.DeployBinaryFunction) fn;
+ queue.schedule(new DeployBinaryWork(NodeControllerService.this, ndbf.getDeploymentId(), ndbf
+ .getBinaryURLs()));
+ return;
+ }
+
+ case UNDEPLOY_BINARY: {
+ CCNCFunctions.UnDeployBinaryFunction ndbf = (CCNCFunctions.UnDeployBinaryFunction) fn;
+ queue.schedule(new UnDeployBinaryWork(NodeControllerService.this, ndbf.getDeploymentId()));
+ return;
+ }
}
throw new IllegalArgumentException("Unknown function: " + fn.getFunctionId());
}
}
- public void sendApplicationMessageToCC(byte[] data, String nodeId) throws Exception {
- ccs.sendApplicationMessageToCC(data, nodeId);
+ public void sendApplicationMessageToCC(byte[] data, DeploymentId deploymentId, String nodeId) throws Exception {
+ ccs.sendApplicationMessageToCC(data, deploymentId, nodeId);
}
public IDatasetPartitionManager getDatasetPartitionManager() {
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Task.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Task.java
index eefaf54..33519dc 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Task.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/Task.java
@@ -14,9 +14,6 @@
*/
package edu.uci.ics.hyracks.control.nc;
-import java.io.ByteArrayOutputStream;
-import java.io.PrintWriter;
-import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
@@ -37,6 +34,7 @@
import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
import edu.uci.ics.hyracks.api.dataflow.state.IStateObject;
import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.io.FileReference;
@@ -82,11 +80,7 @@
private IOperatorNodePushable operator;
- private volatile boolean failed;
-
- private ByteArrayOutputStream errorBaos;
-
- private PrintWriter errorWriter;
+ private final List<Exception> exceptions;
private List<Throwable> caughtExceptions;
@@ -105,10 +99,7 @@
opEnv = joblet.getEnvironment();
partitionSendProfile = new Hashtable<PartitionId, PartitionProfile>();
pendingThreads = new LinkedHashSet<Thread>();
- failed = false;
- errorBaos = new ByteArrayOutputStream();
- errorWriter = new PrintWriter(errorBaos, true);
- caughtExceptions = new ArrayList<Throwable>();
+ exceptions = new ArrayList<>();
this.ncs = ncs;
}
@@ -256,11 +247,7 @@
pushFrames(collector, writer);
} catch (HyracksDataException e) {
synchronized (Task.this) {
- failed = true;
- caughtExceptions.add(e);
- errorWriter.println("Exception caught by thread: " + thread.getName());
- e.printStackTrace(errorWriter);
- errorWriter.println();
+ exceptions.add(e);
}
} finally {
thread.setName(oldName);
@@ -282,25 +269,15 @@
NodeControllerService ncs = joblet.getNodeController();
ncs.getWorkQueue().schedule(new NotifyTaskCompleteWork(ncs, this));
} catch (Exception e) {
- failed = true;
- caughtExceptions.add(e);
- errorWriter.println("Exception caught by thread: " + ct.getName());
- e.printStackTrace(errorWriter);
- errorWriter.println();
+ exceptions.add(e);
} finally {
ct.setName(threadName);
close();
removePendingThread(ct);
}
- if (failed) {
- errorWriter.close();
+ if (!exceptions.isEmpty()) {
NodeControllerService ncs = joblet.getNodeController();
- try {
- ncs.getWorkQueue().schedule(
- new NotifyTaskFailureWork(ncs, this, errorBaos.toString("UTF-8"), caughtExceptions));
- } catch (UnsupportedEncodingException e) {
- e.printStackTrace();
- }
+ ncs.getWorkQueue().schedule(new NotifyTaskFailureWork(ncs, this, exceptions));
}
}
@@ -362,7 +339,7 @@
}
@Override
- public void sendApplicationMessageToCC(byte[] message, String nodeId) throws Exception {
- this.ncs.sendApplicationMessageToCC(message, nodeId);
+ public void sendApplicationMessageToCC(byte[] message, DeploymentId deploymentId, String nodeId) throws Exception {
+ this.ncs.sendApplicationMessageToCC(message, deploymentId, nodeId);
}
}
\ No newline at end of file
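The rewritten failure path drops the string building (ByteArrayOutputStream/PrintWriter) in favor of collecting the Exception objects themselves and shipping them to the CC in a single NotifyTaskFailureWork. A simplified sketch of that pattern; the class and method names below are hypothetical.

import java.util.ArrayList;
import java.util.List;

// Worker threads append to a shared list under the task's lock; the last
// thread out reports once if anything failed.
public class FailureCollector {
    private final List<Exception> exceptions = new ArrayList<>();

    public synchronized void record(Exception e) {
        exceptions.add(e);
    }

    public void onAllThreadsDone() {
        List<Exception> snapshot;
        synchronized (this) {
            snapshot = new ArrayList<>(exceptions);
        }
        if (!snapshot.isEmpty()) {
            // In Hyracks this schedules NotifyTaskFailureWork; here we just print.
            snapshot.forEach(e -> System.err.println("task failed: " + e));
        }
    }
}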
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetMemoryManager.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetMemoryManager.java
index cecd677..4e27f12 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetMemoryManager.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetMemoryManager.java
@@ -21,12 +21,13 @@
import java.util.Map;
import java.util.Set;
-import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionWriter;
import edu.uci.ics.hyracks.api.dataset.Page;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.partitions.ResultSetPartitionId;
public class DatasetMemoryManager {
+ private int availableMemory;
+
private final Set<Page> availPages;
private final LeastRecentlyUsedList leastRecentlyUsedList;
@@ -36,29 +37,32 @@
private final static int FRAME_SIZE = 32768;
public DatasetMemoryManager(int availableMemory) {
+ this.availableMemory = availableMemory;
+
availPages = new HashSet<Page>();
// At least have one page for temporarily storing the results.
- if (availableMemory <= 0)
- availableMemory = FRAME_SIZE;
-
- while (availableMemory >= FRAME_SIZE) {
- /* TODO(madhusudancs): Should we have some way of accounting this memory usage by using Hyrack's allocateFrame()
- * instead of direct ByteBuffer.allocate()?
- */
- availPages.add(new Page(ByteBuffer.allocate(FRAME_SIZE)));
- availableMemory -= FRAME_SIZE;
- }
+ if (this.availableMemory <= FRAME_SIZE)
+ this.availableMemory = FRAME_SIZE;
leastRecentlyUsedList = new LeastRecentlyUsedList();
resultPartitionNodesMap = new HashMap<ResultSetPartitionId, PartitionNode>();
}
- public Page requestPage(ResultSetPartitionId resultSetPartitionId, IDatasetPartitionWriter dpw)
- throws OutOfMemoryError, HyracksDataException {
+ public synchronized Page requestPage(ResultSetPartitionId resultSetPartitionId, ResultState resultState)
+ throws HyracksDataException {
Page page;
if (availPages.isEmpty()) {
- page = evictPage();
+ if (availableMemory >= FRAME_SIZE) {
+ /* TODO(madhusudancs): Should we have some way of accounting for this memory usage by using Hyracks' allocateFrame()
+ * instead of direct ByteBuffer.allocate()?
+ */
+ availPages.add(new Page(ByteBuffer.allocate(FRAME_SIZE)));
+ availableMemory -= FRAME_SIZE;
+ page = getAvailablePage();
+ } else {
+ page = evictPage();
+ }
} else {
page = getAvailablePage();
}
@@ -71,7 +75,7 @@
* update reference call before a page is pushed on to the element of the LRU list. So we first obtain the page,
* then make an updateReference call, which in turn creates a new node in the LRU list, and then add the page to it.
*/
- PartitionNode pn = updateReference(resultSetPartitionId, dpw);
+ PartitionNode pn = updateReference(resultSetPartitionId, resultState);
pn.add(page);
return page;
}
@@ -81,7 +85,7 @@
updateReference(resultSetPartitionId, null);
}
- public int getPageSize() {
+ public static int getPageSize() {
return FRAME_SIZE;
}
@@ -90,28 +94,29 @@
resultPartitionNodesMap.put(resultSetPartitionId, pn);
}
- protected synchronized PartitionNode updateReference(ResultSetPartitionId resultSetPartitionId,
- IDatasetPartitionWriter dpw) {
+ protected PartitionNode updateReference(ResultSetPartitionId resultSetPartitionId, ResultState resultState) {
PartitionNode pn = null;
if (!resultPartitionNodesMap.containsKey(resultSetPartitionId)) {
- if (dpw != null) {
- pn = new PartitionNode(resultSetPartitionId, dpw);
+ if (resultState != null) {
+ pn = new PartitionNode(resultSetPartitionId, resultState);
insertPartitionNode(resultSetPartitionId, pn);
}
return pn;
}
- pn = resultPartitionNodesMap.get(resultSetPartitionId);
- leastRecentlyUsedList.remove(pn);
- insertPartitionNode(resultSetPartitionId, pn);
+ synchronized (this) {
+ pn = resultPartitionNodesMap.get(resultSetPartitionId);
+ leastRecentlyUsedList.remove(pn);
+ insertPartitionNode(resultSetPartitionId, pn);
+ }
return pn;
}
- protected synchronized Page evictPage() throws HyracksDataException {
+ protected Page evictPage() throws HyracksDataException {
PartitionNode pn = leastRecentlyUsedList.getFirst();
- IDatasetPartitionWriter dpw = pn.getDatasetPartitionWriter();
- Page page = dpw.returnPage();
+ ResultState resultState = pn.getResultState();
+ Page page = resultState.returnPage();
/* If the partition holding the pages breaks the contract by not returning the page or it has no page, just take
* away all the pages allocated to it and add them to the available pages set.
@@ -140,7 +145,7 @@
return page;
}
- protected synchronized Page getAvailablePage() {
+ protected Page getAvailablePage() {
Iterator<Page> iter = availPages.iterator();
Page page = iter.next();
iter.remove();
@@ -197,15 +202,15 @@
private final ResultSetPartitionId resultSetPartitionId;
- private final IDatasetPartitionWriter datasetPartitionWriter;
+ private final ResultState resultState;
private PartitionNode prev;
private PartitionNode next;
- public PartitionNode(ResultSetPartitionId resultSetPartitionId, IDatasetPartitionWriter datasetPartitionWriter) {
+ public PartitionNode(ResultSetPartitionId resultSetPartitionId, ResultState resultState) {
this.resultSetPartitionId = resultSetPartitionId;
- this.datasetPartitionWriter = datasetPartitionWriter;
+ this.resultState = resultState;
prev = null;
next = null;
}
@@ -214,8 +219,8 @@
return resultSetPartitionId;
}
- public IDatasetPartitionWriter getDatasetPartitionWriter() {
- return datasetPartitionWriter;
+ public ResultState getResultState() {
+ return resultState;
}
public void setPrev(PartitionNode node) {
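The constructor no longer pre-allocates the whole budget up front; requestPage now allocates pages lazily while memory remains and only falls back to LRU eviction once the budget is spent. A condensed sketch of that policy, simplified from the class above (the real manager tracks pages per result partition; names here are hypothetical).

import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Deque;

public class LazyPagePool {
    private static final int FRAME_SIZE = 32768;
    private int availableMemory;
    private final Deque<ByteBuffer> lru = new ArrayDeque<>();

    public LazyPagePool(int availableMemory) {
        // Guarantee at least one page, as the constructor above does.
        this.availableMemory = Math.max(availableMemory, FRAME_SIZE);
    }

    public synchronized ByteBuffer requestPage() {
        if (availableMemory >= FRAME_SIZE) {
            availableMemory -= FRAME_SIZE; // allocate lazily, within budget
            ByteBuffer page = ByteBuffer.allocate(FRAME_SIZE);
            lru.addLast(page);
            return page;
        }
        ByteBuffer victim = lru.removeFirst(); // budget exhausted: evict LRU
        victim.clear();
        lru.addLast(victim);
        return victim;
    }
}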
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionManager.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionManager.java
index af9a607..2ec7acc 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionManager.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionManager.java
@@ -14,14 +14,15 @@
*/
package edu.uci.ics.hyracks.control.nc.dataset;
+import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
+import java.util.logging.Logger;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
-import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionReader;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.io.IWorkspaceFileFactory;
@@ -32,11 +33,13 @@
import edu.uci.ics.hyracks.control.nc.resources.DefaultDeallocatableRegistry;
public class DatasetPartitionManager implements IDatasetPartitionManager {
+ private static final Logger LOGGER = Logger.getLogger(DatasetPartitionManager.class.getName());
+
private final NodeControllerService ncs;
private final Executor executor;
- private final Map<JobId, ResultState[]> partitionResultStateMap;
+ private final Map<JobId, Map<ResultSetId, ResultState[]>> partitionResultStateMap;
private final DefaultDeallocatableRegistry deallocatableRegistry;
@@ -50,18 +53,34 @@
this.executor = executor;
deallocatableRegistry = new DefaultDeallocatableRegistry();
fileFactory = new WorkspaceFileFactory(deallocatableRegistry, (IOManager) ncs.getRootContext().getIOManager());
- datasetMemoryManager = new DatasetMemoryManager(availableMemory);
- partitionResultStateMap = new LinkedHashMap<JobId, ResultState[]>() {
+ if (availableMemory >= DatasetMemoryManager.getPageSize()) {
+ datasetMemoryManager = new DatasetMemoryManager(availableMemory);
+ } else {
+ datasetMemoryManager = null;
+ }
+ partitionResultStateMap = new LinkedHashMap<JobId, Map<ResultSetId, ResultState[]>>() {
private static final long serialVersionUID = 1L;
- protected boolean removeEldestEntry(Map.Entry<JobId, ResultState[]> eldest) {
- if (size() > resultHistorySize) {
- for (ResultState state : eldest.getValue()) {
- state.deinit();
+ protected boolean removeEldestEntry(Map.Entry<JobId, Map<ResultSetId, ResultState[]>> eldest) {
+ synchronized (DatasetPartitionManager.this) {
+ if (size() > resultHistorySize) {
+ Map<ResultSetId, ResultState[]> rsIdMap = eldest.getValue();
+ for (ResultSetId rsId : rsIdMap.keySet()) {
+ ResultState[] resultStates = rsIdMap.get(rsId);
+ if (resultStates != null) {
+ for (int i = 0; i < resultStates.length; i++) {
+ ResultState state = resultStates[i];
+ if (state != null) {
+ state.closeAndDelete();
+ LOGGER.fine("Removing partition: " + i + " for JobId: " + eldest.getKey());
+ }
+ }
+ }
+ }
+ return true;
}
- return true;
+ return false;
}
- return false;
}
};
}
@@ -72,15 +91,21 @@
DatasetPartitionWriter dpw = null;
JobId jobId = ctx.getJobletContext().getJobId();
try {
- synchronized (partitionResultStateMap) {
+ synchronized (this) {
ncs.getClusterController().registerResultPartitionLocation(jobId, rsId, orderedResult, partition,
nPartitions, ncs.getDatasetNetworkManager().getNetworkAddress());
- dpw = new DatasetPartitionWriter(ctx, this, jobId, rsId, partition, datasetMemoryManager);
+ dpw = new DatasetPartitionWriter(ctx, this, jobId, rsId, partition, datasetMemoryManager, fileFactory);
- ResultState[] resultStates = partitionResultStateMap.get(jobId);
+ Map<ResultSetId, ResultState[]> rsIdMap = partitionResultStateMap.get(jobId);
+ if (rsIdMap == null) {
+ rsIdMap = new HashMap<ResultSetId, ResultState[]>();
+ partitionResultStateMap.put(jobId, rsIdMap);
+ }
+
+ ResultState[] resultStates = rsIdMap.get(rsId);
if (resultStates == null) {
resultStates = new ResultState[nPartitions];
- partitionResultStateMap.put(jobId, resultStates);
+ rsIdMap.put(rsId, resultStates);
}
resultStates[partition] = dpw.getResultState();
}
@@ -88,12 +113,15 @@
throw new HyracksException(e);
}
+ LOGGER.fine("Initialized partition writer: JobId: " + jobId + ":partition: " + partition);
return dpw;
}
@Override
public void reportPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition) throws HyracksException {
try {
+ LOGGER.fine("Reporting partition write completion: JobId: " + jobId + ": ResultSetId: " + rsId
+ + ":partition: " + partition);
ncs.getClusterController().reportResultPartitionWriteCompletion(jobId, rsId, partition);
} catch (Exception e) {
throw new HyracksException(e);
@@ -103,6 +131,8 @@
@Override
public void reportPartitionFailure(JobId jobId, ResultSetId rsId, int partition) throws HyracksException {
try {
+ LOGGER.info("Reporting partition failure: JobId: " + jobId + ": ResultSetId: " + rsId + ":partition: "
+ + partition);
ncs.getClusterController().reportResultPartitionFailure(jobId, rsId, partition);
} catch (Exception e) {
throw new HyracksException(e);
@@ -110,24 +140,51 @@
}
@Override
- public void initializeDatasetPartitionReader(JobId jobId, int partition, IFrameWriter writer)
- throws HyracksException {
+ public void initializeDatasetPartitionReader(JobId jobId, ResultSetId resultSetId, int partition,
+ IFrameWriter writer) throws HyracksException {
ResultState resultState;
- synchronized (partitionResultStateMap) {
- ResultState[] resultStates = partitionResultStateMap.get(jobId);
+ synchronized (this) {
+ Map<ResultSetId, ResultState[]> rsIdMap = partitionResultStateMap.get(jobId);
- if (resultStates == null) {
+ if (rsIdMap == null) {
throw new HyracksException("Unknown JobId " + jobId);
}
+ ResultState[] resultStates = rsIdMap.get(resultSetId);
+ if (resultStates == null) {
+ throw new HyracksException("Unknown JobId: " + jobId + " ResultSetId: " + resultSetId);
+ }
+
resultState = resultStates[partition];
if (resultState == null) {
throw new HyracksException("No DatasetPartitionWriter for partition " + partition);
}
}
- IDatasetPartitionReader dpr = new DatasetPartitionReader(datasetMemoryManager, executor, resultState);
+ DatasetPartitionReader dpr = new DatasetPartitionReader(datasetMemoryManager, executor, resultState);
dpr.writeTo(writer);
+ LOGGER.fine("Initialized partition reader: JobId: " + jobId + ":ResultSetId: " + resultSetId + ":partition: "
+ + partition);
+ }
+
+ @Override
+ public synchronized void abortReader(JobId jobId) {
+ Map<ResultSetId, ResultState[]> rsIdMap = partitionResultStateMap.get(jobId);
+
+ if (rsIdMap == null) {
+ return;
+ }
+
+ for (ResultSetId rsId : rsIdMap.keySet()) {
+ ResultState[] resultStates = rsIdMap.get(rsId);
+ if (resultStates != null) {
+ for (ResultState state : resultStates) {
+ if (state != null) {
+ state.abort();
+ }
+ }
+ }
+ }
}
@Override
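The bounded result history above relies on LinkedHashMap.removeEldestEntry, which the JDK invokes after every put; returning true drops the eldest job's entry once more than resultHistorySize jobs are tracked. A minimal generic sketch of that hook (eviction-side cleanup, which the real code performs on each ResultState, is elided here):

import java.util.LinkedHashMap;
import java.util.Map;

public class ResultHistory<K, V> extends LinkedHashMap<K, V> {
    private static final long serialVersionUID = 1L;
    private final int capacity;

    public ResultHistory(int capacity) {
        this.capacity = capacity;
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        // Returning true removes the eldest mapping; the real code also
        // closes and deletes the evicted ResultStates before returning.
        return size() > capacity;
    }
}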
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionReader.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionReader.java
index a584b4b..07624de 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionReader.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionReader.java
@@ -20,14 +20,10 @@
import java.util.logging.Logger;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionReader;
-import edu.uci.ics.hyracks.api.dataset.Page;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.IFileHandle;
-import edu.uci.ics.hyracks.api.io.IIOManager;
import edu.uci.ics.hyracks.comm.channels.NetworkOutputChannel;
-public class DatasetPartitionReader implements IDatasetPartitionReader {
+public class DatasetPartitionReader {
private static final Logger LOGGER = Logger.getLogger(DatasetPartitionReader.class.getName());
private final DatasetMemoryManager datasetMemoryManager;
@@ -36,51 +32,12 @@
private final ResultState resultState;
- private IFileHandle fileHandle;
-
public DatasetPartitionReader(DatasetMemoryManager datasetMemoryManager, Executor executor, ResultState resultState) {
this.datasetMemoryManager = datasetMemoryManager;
this.executor = executor;
this.resultState = resultState;
}
- private long read(long offset, ByteBuffer buffer) throws HyracksDataException {
- long readSize = 0;
- synchronized (resultState) {
- while (offset >= resultState.getSize() && !resultState.getEOS()) {
- try {
- resultState.wait();
- } catch (InterruptedException e) {
- throw new HyracksDataException(e);
- }
- }
- }
-
- if (offset >= resultState.getSize() && resultState.getEOS()) {
- return readSize;
- }
-
- if (offset < resultState.getPersistentSize()) {
- readSize = resultState.getIOManager().syncRead(fileHandle, offset, buffer);
- }
-
- if (readSize < buffer.capacity()) {
- long localPageOffset = offset - resultState.getPersistentSize();
- int localPageIndex = (int) (localPageOffset / datasetMemoryManager.getPageSize());
- int pageOffset = (int) (localPageOffset % datasetMemoryManager.getPageSize());
- Page page = resultState.getPage(localPageIndex);
- if (page == null) {
- return readSize;
- }
- readSize += buffer.remaining();
- buffer.put(page.getBuffer().array(), pageOffset, buffer.remaining());
- }
-
- datasetMemoryManager.pageReferenced(resultState.getResultSetPartitionId());
- return readSize;
- }
-
- @Override
public void writeTo(final IFrameWriter writer) {
executor.execute(new Runnable() {
@Override
@@ -88,8 +45,7 @@
NetworkOutputChannel channel = (NetworkOutputChannel) writer;
channel.setFrameSize(resultState.getFrameSize());
try {
- fileHandle = resultState.getIOManager().open(resultState.getValidFileReference(),
- IIOManager.FileReadWriteMode.READ_ONLY, IIOManager.FileSyncMode.METADATA_ASYNC_DATA_ASYNC);
+ resultState.readOpen();
channel.open();
try {
long offset = 0;
@@ -109,10 +65,8 @@
}
} finally {
channel.close();
- resultState.getIOManager().close(fileHandle);
+ resultState.readClose();
}
- } catch (InterruptedException e) {
- throw new RuntimeException(e);
} catch (HyracksDataException e) {
throw new RuntimeException(e);
}
@@ -120,6 +74,14 @@
LOGGER.info("result reading successful(" + resultState.getResultSetPartitionId() + ")");
}
}
+
+ private long read(long offset, ByteBuffer buffer) throws HyracksDataException {
+ if (datasetMemoryManager == null) {
+ return resultState.read(offset, buffer);
+ } else {
+ return resultState.read(datasetMemoryManager, offset, buffer);
+ }
+ }
});
}
}
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionWriter.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionWriter.java
index 317f553..8f4b639 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionWriter.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/DatasetPartitionWriter.java
@@ -18,24 +18,19 @@
import java.util.logging.Level;
import java.util.logging.Logger;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
-import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionWriter;
-import edu.uci.ics.hyracks.api.dataset.Page;
import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.io.IFileHandle;
-import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.api.io.IWorkspaceFileFactory;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.partitions.ResultSetPartitionId;
-public class DatasetPartitionWriter implements IDatasetPartitionWriter {
+public class DatasetPartitionWriter implements IFrameWriter {
private static final Logger LOGGER = Logger.getLogger(DatasetPartitionWriter.class.getName());
- private static final String FILE_PREFIX = "result_";
-
private final IDatasetPartitionManager manager;
private final JobId jobId;
@@ -50,10 +45,9 @@
private final ResultState resultState;
- private IFileHandle fileHandle;
-
public DatasetPartitionWriter(IHyracksTaskContext ctx, IDatasetPartitionManager manager, JobId jobId,
- ResultSetId rsId, int partition, DatasetMemoryManager datasetMemoryManager) {
+ ResultSetId rsId, int partition, DatasetMemoryManager datasetMemoryManager,
+ IWorkspaceFileFactory fileFactory) {
this.manager = manager;
this.jobId = jobId;
this.resultSetId = rsId;
@@ -61,7 +55,7 @@
this.datasetMemoryManager = datasetMemoryManager;
resultSetPartitionId = new ResultSetPartitionId(jobId, rsId, partition);
- resultState = new ResultState(resultSetPartitionId, ctx.getIOManager(), ctx.getFrameSize());
+ resultState = new ResultState(resultSetPartitionId, ctx.getIOManager(), fileFactory, ctx.getFrameSize());
}
public ResultState getResultState() {
@@ -69,41 +63,27 @@
}
@Override
- public void open() throws HyracksDataException {
+ public void open() {
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("open(" + partition + ")");
}
- String fName = FILE_PREFIX + String.valueOf(partition);
- FileReference fRef = manager.getFileFactory().createUnmanagedWorkspaceFile(fName);
- fileHandle = resultState.getIOManager().open(fRef, IIOManager.FileReadWriteMode.READ_WRITE,
- IIOManager.FileSyncMode.METADATA_ASYNC_DATA_ASYNC);
- resultState.init(fRef, fileHandle);
+ resultState.open();
}
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- int srcOffset = 0;
- Page destPage = resultState.getLastPage();
-
- while (srcOffset < buffer.limit()) {
- if ((destPage == null) || (destPage.getBuffer().remaining() <= 0)) {
- destPage = datasetMemoryManager.requestPage(resultSetPartitionId, this);
- resultState.addPage(destPage);
- }
- int srcLength = Math.min(buffer.limit() - srcOffset, destPage.getBuffer().remaining());
- destPage.getBuffer().put(buffer.array(), srcOffset, srcLength);
- srcOffset += srcLength;
- resultState.incrementSize(srcLength);
- }
-
- synchronized (resultState) {
- resultState.notifyAll();
+ if (datasetMemoryManager == null) {
+ resultState.write(buffer);
+ } else {
+ resultState.write(datasetMemoryManager, buffer);
}
}
@Override
public void fail() throws HyracksDataException {
try {
+ resultState.closeAndDelete();
+ resultState.abort();
manager.reportPartitionFailure(jobId, resultSetId, partition);
} catch (HyracksException e) {
throw new HyracksDataException(e);
@@ -117,32 +97,10 @@
}
try {
- synchronized (resultState) {
- resultState.setEOS(true);
- resultState.notifyAll();
- }
+ resultState.close();
manager.reportPartitionWriteCompletion(jobId, resultSetId, partition);
} catch (HyracksException e) {
throw new HyracksDataException(e);
}
}
-
- @Override
- public Page returnPage() throws HyracksDataException {
- Page page = resultState.removePage(0);
-
- IIOManager ioManager = resultState.getIOManager();
-
- // If we do not have any pages to be given back close the write channel since we don't write any more, return null.
- if (page == null) {
- ioManager.close(fileHandle);
- return null;
- }
-
- page.getBuffer().flip();
-
- long delta = ioManager.syncWrite(fileHandle, resultState.getPersistentSize(), page.getBuffer());
- resultState.incrementPersistentSize(delta);
- return page;
- }
}
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/ResultState.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/ResultState.java
index 661df93..911f372 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/ResultState.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/dataset/ResultState.java
@@ -17,28 +17,35 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
+import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import edu.uci.ics.hyracks.api.dataflow.state.IStateObject;
import edu.uci.ics.hyracks.api.dataset.Page;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.api.io.IFileHandle;
import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.api.io.IWorkspaceFileFactory;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.partitions.ResultSetPartitionId;
public class ResultState implements IStateObject {
+ private static final String FILE_PREFIX = "result_";
+
private final ResultSetPartitionId resultSetPartitionId;
private final int frameSize;
private final IIOManager ioManager;
+ private final IWorkspaceFileFactory fileFactory;
+
private final AtomicBoolean eos;
- private final AtomicBoolean readEOS;
+ private final AtomicBoolean failed;
private final List<Page> localPageList;
@@ -46,29 +53,40 @@
private IFileHandle writeFileHandle;
+ private IFileHandle readFileHandle;
+
private long size;
private long persistentSize;
- ResultState(ResultSetPartitionId resultSetPartitionId, IIOManager ioManager, int frameSize) {
+ ResultState(ResultSetPartitionId resultSetPartitionId, IIOManager ioManager, IWorkspaceFileFactory fileFactory,
+ int frameSize) {
this.resultSetPartitionId = resultSetPartitionId;
this.ioManager = ioManager;
+ this.fileFactory = fileFactory;
this.frameSize = frameSize;
eos = new AtomicBoolean(false);
- readEOS = new AtomicBoolean(false);
+ failed = new AtomicBoolean(false);
localPageList = new ArrayList<Page>();
+
+ fileRef = null;
+ writeFileHandle = null;
}
- public synchronized void init(FileReference fileRef, IFileHandle writeFileHandle) {
- this.fileRef = fileRef;
- this.writeFileHandle = writeFileHandle;
-
+ public synchronized void open() {
size = 0;
persistentSize = 0;
+ }
+
+ public synchronized void close() {
+ eos.set(true);
notifyAll();
}
- public synchronized void deinit() {
+ public synchronized void closeAndDelete() {
+ // Deleting a job is equivalent to aborting it for all practical purposes, so the same action needs
+ // to be taken when further requests arrive for these result states.
+ failed.set(true);
if (writeFileHandle != null) {
try {
ioManager.close(writeFileHandle);
@@ -76,7 +94,149 @@
// Since file handle could not be closed, just ignore.
}
}
- fileRef.delete();
+ if (fileRef != null) {
+ fileRef.delete();
+ }
+ }
+
+ public synchronized void write(ByteBuffer buffer) throws HyracksDataException {
+ if (fileRef == null) {
+ String fName = FILE_PREFIX + String.valueOf(resultSetPartitionId.getPartition());
+ fileRef = fileFactory.createUnmanagedWorkspaceFile(fName);
+ writeFileHandle = ioManager.open(fileRef, IIOManager.FileReadWriteMode.READ_WRITE,
+ IIOManager.FileSyncMode.METADATA_ASYNC_DATA_ASYNC);
+ }
+
+ size += ioManager.syncWrite(writeFileHandle, size, buffer);
+
+ notifyAll();
+ }
+
+ public synchronized void write(DatasetMemoryManager datasetMemoryManager, ByteBuffer buffer)
+ throws HyracksDataException {
+ int srcOffset = 0;
+ Page destPage = null;
+
+ if (!localPageList.isEmpty()) {
+ destPage = localPageList.get(localPageList.size() - 1);
+ }
+
+ while (srcOffset < buffer.limit()) {
+ if ((destPage == null) || (destPage.getBuffer().remaining() <= 0)) {
+ destPage = datasetMemoryManager.requestPage(resultSetPartitionId, this);
+ localPageList.add(destPage);
+ }
+ int srcLength = Math.min(buffer.limit() - srcOffset, destPage.getBuffer().remaining());
+ destPage.getBuffer().put(buffer.array(), srcOffset, srcLength);
+ srcOffset += srcLength;
+ size += srcLength;
+ }
+
+ notifyAll();
+ }
+
+ public synchronized void readOpen() {
+ // A no-op for now; kept so the read open/close API stays stable for future use.
+ }
+
+ public synchronized void readClose() throws HyracksDataException {
+ if (readFileHandle != null) {
+ ioManager.close(readFileHandle);
+ }
+ }
+
+ public synchronized long read(long offset, ByteBuffer buffer) throws HyracksDataException {
+ long readSize = 0;
+
+ while (offset >= size && !eos.get() && !failed.get()) {
+ try {
+ wait();
+ } catch (InterruptedException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+ if ((offset >= size && eos.get()) || failed.get()) {
+ return readSize;
+ }
+
+ if (readFileHandle == null) {
+ initReadFileHandle();
+ }
+ readSize = ioManager.syncRead(readFileHandle, offset, buffer);
+
+ return readSize;
+ }
+
+ public long read(DatasetMemoryManager datasetMemoryManager, long offset, ByteBuffer buffer)
+ throws HyracksDataException {
+ long readSize = 0;
+ synchronized (this) {
+ while (offset >= size && !eos.get() && !failed.get()) {
+ try {
+ wait();
+ } catch (InterruptedException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ if ((offset >= size && eos.get()) || failed.get()) {
+ return readSize;
+ }
+
+ if (offset < persistentSize) {
+ if (readFileHandle == null) {
+ initReadFileHandle();
+ }
+ readSize = ioManager.syncRead(readFileHandle, offset, buffer);
+ }
+
+ if (readSize < buffer.capacity()) {
+ long localPageOffset = offset - persistentSize;
+ int localPageIndex = (int) (localPageOffset / DatasetMemoryManager.getPageSize());
+ int pageOffset = (int) (localPageOffset % DatasetMemoryManager.getPageSize());
+ Page page = getPage(localPageIndex);
+ if (page == null) {
+ return readSize;
+ }
+ readSize += buffer.remaining();
+ buffer.put(page.getBuffer().array(), pageOffset, buffer.remaining());
+ }
+ }
+ datasetMemoryManager.pageReferenced(resultSetPartitionId);
+ return readSize;
+ }
+
+ public synchronized void abort() {
+ failed.set(true);
+ notifyAll();
+ }
+
+ public synchronized Page returnPage() throws HyracksDataException {
+ Page page = removePage();
+
+ // If we do not have any pages to be given back close the write channel since we don't write any more, return null.
+ if (page == null) {
+ ioManager.close(writeFileHandle);
+ return null;
+ }
+
+ page.getBuffer().flip();
+
+ if (fileRef == null) {
+ String fName = FILE_PREFIX + String.valueOf(resultSetPartitionId.getPartition());
+ fileRef = fileFactory.createUnmanagedWorkspaceFile(fName);
+ writeFileHandle = ioManager.open(fileRef, IIOManager.FileReadWriteMode.READ_WRITE,
+ IIOManager.FileSyncMode.METADATA_ASYNC_DATA_ASYNC);
+ notifyAll();
+ }
+
+ long delta = ioManager.syncWrite(writeFileHandle, persistentSize, page.getBuffer());
+ persistentSize += delta;
+ return page;
+ }
+
+ public synchronized void setEOS(boolean eos) {
+ this.eos.set(eos);
}
public ResultSetPartitionId getResultSetPartitionId() {
@@ -91,76 +251,6 @@
return ioManager;
}
- public synchronized void incrementSize(long delta) {
- size += delta;
- }
-
- public synchronized long getSize() {
- return size;
- }
-
- public synchronized void incrementPersistentSize(long delta) {
- persistentSize += delta;
- }
-
- public synchronized long getPersistentSize() {
- return persistentSize;
- }
-
- public void setEOS(boolean eos) {
- this.eos.set(eos);
- }
-
- public boolean getEOS() {
- return eos.get();
- }
-
- public boolean getReadEOS() {
- return readEOS.get();
- }
-
- public synchronized void addPage(Page page) {
- localPageList.add(page);
- }
-
- public synchronized Page removePage(int index) {
- Page page = null;
- if (!localPageList.isEmpty()) {
- page = localPageList.remove(index);
- }
- return page;
- }
-
- public synchronized Page getPage(int index) {
- Page page = null;
- if (!localPageList.isEmpty()) {
- page = localPageList.get(index);
- }
- return page;
- }
-
- public synchronized Page getLastPage() {
- Page page = null;
- if (!localPageList.isEmpty()) {
- page = localPageList.get(localPageList.size() - 1);
- }
- return page;
- }
-
- public synchronized Page getFirstPage() {
- Page page = null;
- if (!localPageList.isEmpty()) {
- page = localPageList.get(0);
- }
- return page;
- }
-
- public synchronized FileReference getValidFileReference() throws InterruptedException {
- while (fileRef == null)
- wait();
- return fileRef;
- }
-
@Override
public JobId getJobId() {
return resultSetPartitionId.getJobId();
@@ -185,4 +275,36 @@
public void fromBytes(DataInput in) throws IOException {
throw new UnsupportedOperationException();
}
+
+ private Page getPage(int index) {
+ Page page = null;
+ if (!localPageList.isEmpty()) {
+ page = localPageList.get(index);
+ }
+ return page;
+ }
+
+ private Page removePage() {
+ Page page = null;
+ if (!localPageList.isEmpty()) {
+ page = localPageList.remove(localPageList.size() - 1);
+ }
+ return page;
+ }
+
+ private void initReadFileHandle() throws HyracksDataException {
+ while (fileRef == null && !failed.get()) {
+ try {
+ wait();
+ } catch (InterruptedException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+ if (failed.get()) {
+ return;
+ }
+
+ readFileHandle = ioManager.open(fileRef, IIOManager.FileReadWriteMode.READ_ONLY,
+ IIOManager.FileSyncMode.METADATA_ASYNC_DATA_ASYNC);
+ }
}
\ No newline at end of file
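ResultState now owns the blocking-read protocol end to end: writers grow size and notifyAll(), close() sets EOS, abort() sets the failure flag, and readers wait until one of those conditions lets them proceed. A stripped-down sketch of that monitor, with storage reduced to a byte counter (names hypothetical):

public class BlockingResultBuffer {
    private long size;
    private boolean eos;
    private boolean failed;

    public synchronized void append(int nBytes) {
        size += nBytes;
        notifyAll(); // wake any reader blocked past the old size
    }

    public synchronized void close() { eos = true; notifyAll(); }

    public synchronized void abort() { failed = true; notifyAll(); }

    // Returns -1 on end-of-stream or failure, else the readable byte count.
    public synchronized long awaitReadable(long offset) throws InterruptedException {
        while (offset >= size && !eos && !failed) {
            wait();
        }
        return ((offset >= size && eos) || failed) ? -1 : size - offset;
    }
}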
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/DatasetNetworkManager.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/DatasetNetworkManager.java
index 5b8b333..84baf49 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/DatasetNetworkManager.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/net/DatasetNetworkManager.java
@@ -24,6 +24,7 @@
import edu.uci.ics.hyracks.api.comm.NetworkAddress;
import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
+import edu.uci.ics.hyracks.api.dataset.ResultSetId;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.comm.channels.IChannelConnectionFactory;
@@ -95,6 +96,7 @@
@Override
public void accept(ByteBuffer buffer) {
JobId jobId = new JobId(buffer.getLong());
+ ResultSetId rsId = new ResultSetId(buffer.getLong());
int partition = buffer.getInt();
if (LOGGER.isLoggable(Level.FINE)) {
LOGGER.fine("Received initial dataset partition read request for JobId: " + jobId + " partition: "
@@ -102,7 +104,7 @@
}
noc = new NetworkOutputChannel(ccb, 1);
try {
- partitionManager.initializeDatasetPartitionReader(jobId, partition, noc);
+ partitionManager.initializeDatasetPartitionReader(jobId, rsId, partition, noc);
} catch (HyracksException e) {
noc.abort();
}
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/AbortTasksWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/AbortTasksWork.java
index 8f8c032..54ac99a 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/AbortTasksWork.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/AbortTasksWork.java
@@ -20,6 +20,7 @@
import java.util.logging.Logger;
import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.control.common.work.AbstractWork;
import edu.uci.ics.hyracks.control.nc.Joblet;
@@ -46,6 +47,11 @@
if (LOGGER.isLoggable(Level.INFO)) {
LOGGER.info("Aborting Tasks: " + jobId + ":" + tasks);
}
+ IDatasetPartitionManager dpm = ncs.getDatasetPartitionManager();
+ if (dpm != null) {
+ dpm.abortReader(jobId);
+ }
+
Map<JobId, Joblet> jobletMap = ncs.getJobletMap();
Joblet ji = jobletMap.get(jobId);
if (ji != null) {
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ApplicationMessageWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ApplicationMessageWork.java
index a1499b8..19a5a81 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ApplicationMessageWork.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/ApplicationMessageWork.java
@@ -14,12 +14,12 @@
*/
package edu.uci.ics.hyracks.control.nc.work;
-import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.messages.IMessage;
-import edu.uci.ics.hyracks.api.util.JavaSerializationUtils;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentUtils;
import edu.uci.ics.hyracks.control.common.work.AbstractWork;
import edu.uci.ics.hyracks.control.nc.NodeControllerService;
import edu.uci.ics.hyracks.control.nc.application.NCApplicationContext;
@@ -30,11 +30,13 @@
public class ApplicationMessageWork extends AbstractWork {
private static final Logger LOGGER = Logger.getLogger(ApplicationMessageWork.class.getName());
private byte[] message;
+ private DeploymentId deploymentId;
private String nodeId;
private NodeControllerService ncs;
- public ApplicationMessageWork(NodeControllerService ncs, byte[] message, String nodeId) {
+ public ApplicationMessageWork(NodeControllerService ncs, byte[] message, DeploymentId deploymentId, String nodeId) {
this.ncs = ncs;
+ this.deploymentId = deploymentId;
this.nodeId = nodeId;
this.message = message;
}
@@ -43,16 +45,15 @@
public void run() {
NCApplicationContext ctx = ncs.getApplicationContext();
try {
- IMessage data = (IMessage) JavaSerializationUtils.deserialize(message);
+ IMessage data = (IMessage) DeploymentUtils.deserialize(message, deploymentId, ctx);
if (ctx.getMessageBroker() != null) {
ctx.getMessageBroker().receivedMessage(data, nodeId);
} else {
LOGGER.log(Level.WARNING, "Messsage was sent, but no Message Broker set!");
}
- } catch (IOException e) {
+ } catch (Exception e) {
Logger.getLogger(this.getClass().getName()).log(Level.WARNING, "Error in application message delivery!", e);
- } catch (ClassNotFoundException e) {
- Logger.getLogger(this.getClass().getName()).log(Level.WARNING, "Error in application message delivery!", e);
+ throw new RuntimeException(e);
}
}
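DeploymentUtils.deserialize replaces JavaSerializationUtils here so that message classes are resolved against the deployment's class loader rather than the NC's system loader. The standard JDK way to express that is an ObjectInputStream with resolveClass overridden; a self-contained sketch under that assumption (illustrative only, not the actual DeploymentUtils code):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectStreamClass;

public final class ClassLoaderObjectInputStream extends ObjectInputStream {
    private final ClassLoader loader;

    public ClassLoaderObjectInputStream(InputStream in, ClassLoader loader) throws IOException {
        super(in);
        this.loader = loader;
    }

    @Override
    protected Class<?> resolveClass(ObjectStreamClass desc) throws IOException, ClassNotFoundException {
        // Route class resolution through the per-deployment loader.
        return Class.forName(desc.getName(), false, loader);
    }

    public static Object deserialize(byte[] bytes, ClassLoader loader) throws IOException, ClassNotFoundException {
        try (ClassLoaderObjectInputStream ois =
                new ClassLoaderObjectInputStream(new ByteArrayInputStream(bytes), loader)) {
            return ois.readObject();
        }
    }
}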
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/DeployBinaryWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/DeployBinaryWork.java
new file mode 100644
index 0000000..e5f1bc1
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/DeployBinaryWork.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.control.nc.work;
+
+import java.net.URL;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
+import edu.uci.ics.hyracks.control.common.base.IClusterController;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentStatus;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentUtils;
+import edu.uci.ics.hyracks.control.common.work.AbstractWork;
+import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+
+/**
+ * Deploy-binary work that runs on each NC.
+ *
+ * @author yingyib
+ */
+public class DeployBinaryWork extends AbstractWork {
+
+ private DeploymentId deploymentId;
+ private NodeControllerService ncs;
+ private List<URL> binaryURLs;
+
+ public DeployBinaryWork(NodeControllerService ncs, DeploymentId deploymentId, List<URL> binaryURLs) {
+ this.deploymentId = deploymentId;
+ this.ncs = ncs;
+ this.binaryURLs = binaryURLs;
+ }
+
+ @Override
+ public void run() {
+ DeploymentStatus status;
+ try {
+ DeploymentUtils.deploy(deploymentId, binaryURLs, ncs.getApplicationContext()
+ .getJobSerializerDeserializerContainer(), ncs.getServerContext(), true);
+ status = DeploymentStatus.SUCCEED;
+ } catch (Exception e) {
+ status = DeploymentStatus.FAIL;
+ e.printStackTrace();
+ }
+ try {
+ IClusterController ccs = ncs.getClusterController();
+ ccs.notifyDeployBinary(deploymentId, ncs.getId(), status);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+}
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/NotifyTaskFailureWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/NotifyTaskFailureWork.java
index 2235224..d96872d 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/NotifyTaskFailureWork.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/NotifyTaskFailureWork.java
@@ -16,6 +16,8 @@
import java.util.List;
+import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
+import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.control.common.work.AbstractWork;
import edu.uci.ics.hyracks.control.nc.NodeControllerService;
import edu.uci.ics.hyracks.control.nc.Task;
@@ -23,21 +25,24 @@
public class NotifyTaskFailureWork extends AbstractWork {
private final NodeControllerService ncs;
private final Task task;
- private final String details;
- private final List<Throwable> caughtExceptions;
- public NotifyTaskFailureWork(NodeControllerService ncs, Task task, String details, List<Throwable> caughtExceptions) {
+ private final List<Exception> exceptions;
+
+ public NotifyTaskFailureWork(NodeControllerService ncs, Task task, List<Exception> exceptions) {
this.ncs = ncs;
this.task = task;
- this.details = details;
- this.caughtExceptions = caughtExceptions;
+ this.exceptions = exceptions;
}
@Override
public void run() {
try {
- ncs.getClusterController().notifyTaskFailure(task.getJobletContext().getJobId(), task.getTaskAttemptId(),
- ncs.getId(), details, caughtExceptions);
+ JobId jobId = task.getJobletContext().getJobId();
+ IDatasetPartitionManager dpm = ncs.getDatasetPartitionManager();
+ if (dpm != null) {
+ dpm.abortReader(jobId);
+ }
+ ncs.getClusterController().notifyTaskFailure(jobId, task.getTaskAttemptId(), ncs.getId(), exceptions);
} catch (Exception e) {
e.printStackTrace();
}
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/StartTasksWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/StartTasksWork.java
index fffecc2..2c6d38e 100644
--- a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/StartTasksWork.java
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/StartTasksWork.java
@@ -36,13 +36,14 @@
import edu.uci.ics.hyracks.api.dataflow.connectors.IConnectorPolicy;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.job.ActivityCluster;
import edu.uci.ics.hyracks.api.job.ActivityClusterGraph;
import edu.uci.ics.hyracks.api.job.JobFlag;
import edu.uci.ics.hyracks.api.job.JobId;
import edu.uci.ics.hyracks.api.partitions.PartitionId;
-import edu.uci.ics.hyracks.api.util.JavaSerializationUtils;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentUtils;
import edu.uci.ics.hyracks.control.common.job.TaskAttemptDescriptor;
import edu.uci.ics.hyracks.control.common.work.AbstractWork;
import edu.uci.ics.hyracks.control.nc.Joblet;
@@ -60,6 +61,8 @@
private final NodeControllerService ncs;
+ private final DeploymentId deploymentId;
+
private final JobId jobId;
private final byte[] acgBytes;
@@ -70,10 +73,11 @@
private final EnumSet<JobFlag> flags;
- public StartTasksWork(NodeControllerService ncs, JobId jobId, byte[] acgBytes,
+ public StartTasksWork(NodeControllerService ncs, DeploymentId deploymentId, JobId jobId, byte[] acgBytes,
List<TaskAttemptDescriptor> taskDescriptors,
Map<ConnectorDescriptorId, IConnectorPolicy> connectorPoliciesMap, EnumSet<JobFlag> flags) {
this.ncs = ncs;
+ this.deploymentId = deploymentId;
this.jobId = jobId;
this.acgBytes = acgBytes;
this.taskDescriptors = taskDescriptors;
@@ -85,8 +89,8 @@
public void run() {
try {
NCApplicationContext appCtx = ncs.getApplicationContext();
- final Joblet joblet = getOrCreateLocalJoblet(jobId, appCtx, acgBytes == null ? null
- : (ActivityClusterGraph) JavaSerializationUtils.deserialize(acgBytes));
+ final Joblet joblet = getOrCreateLocalJoblet(deploymentId, jobId, appCtx, acgBytes == null ? null
+ : (ActivityClusterGraph) DeploymentUtils.deserialize(acgBytes, deploymentId, appCtx));
final ActivityClusterGraph acg = joblet.getActivityClusterGraph();
IRecordDescriptorProvider rdp = new IRecordDescriptorProvider() {
@@ -164,15 +168,15 @@
}
}
- private Joblet getOrCreateLocalJoblet(JobId jobId, INCApplicationContext appCtx, ActivityClusterGraph acg)
- throws Exception {
+ private Joblet getOrCreateLocalJoblet(DeploymentId deploymentId, JobId jobId, INCApplicationContext appCtx,
+ ActivityClusterGraph acg) throws Exception {
Map<JobId, Joblet> jobletMap = ncs.getJobletMap();
Joblet ji = jobletMap.get(jobId);
if (ji == null) {
if (acg == null) {
throw new NullPointerException("JobActivityGraph was null");
}
- ji = new Joblet(ncs, jobId, appCtx, acg);
+ ji = new Joblet(ncs, deploymentId, jobId, appCtx, acg);
jobletMap.put(jobId, ji);
}
return ji;
diff --git a/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/UnDeployBinaryWork.java b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/UnDeployBinaryWork.java
new file mode 100644
index 0000000..605d63f
--- /dev/null
+++ b/hyracks/hyracks-control/hyracks-control-nc/src/main/java/edu/uci/ics/hyracks/control/nc/work/UnDeployBinaryWork.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.control.nc.work;
+
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
+import edu.uci.ics.hyracks.control.common.base.IClusterController;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentStatus;
+import edu.uci.ics.hyracks.control.common.deployment.DeploymentUtils;
+import edu.uci.ics.hyracks.control.common.work.AbstractWork;
+import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+
+/**
+ * Undeploys the binaries associated with a deployment id.
+ *
+ * @author yingyib
+ */
+public class UnDeployBinaryWork extends AbstractWork {
+
+ private DeploymentId deploymentId;
+ private NodeControllerService ncs;
+
+ public UnDeployBinaryWork(NodeControllerService ncs, DeploymentId deploymentId) {
+ this.deploymentId = deploymentId;
+ this.ncs = ncs;
+ }
+
+ @Override
+ public void run() {
+ DeploymentStatus status;
+ try {
+ DeploymentUtils.undeploy(deploymentId, ncs.getApplicationContext().getJobSerializerDeserializerContainer(),
+ ncs.getServerContext());
+ status = DeploymentStatus.SUCCEED;
+ } catch (Exception e) {
+ status = DeploymentStatus.FAIL;
+ }
+ try {
+ IClusterController ccs = ncs.getClusterController();
+ ccs.notifyDeployBinary(deploymentId, ncs.getId(), status);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+}
diff --git a/hyracks/hyracks-control/pom.xml b/hyracks/hyracks-control/pom.xml
index 5696ffe..9141bf5 100644
--- a/hyracks/hyracks-control/pom.xml
+++ b/hyracks/hyracks-control/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<modules>
diff --git a/hyracks/hyracks-data/hyracks-data-std/pom.xml b/hyracks/hyracks-data/hyracks-data-std/pom.xml
index 9152d0d..b71548a 100644
--- a/hyracks/hyracks-data/hyracks-data-std/pom.xml
+++ b/hyracks/hyracks-data/hyracks-data-std/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,7 +27,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/MurmurHash3BinaryHashFunctionFamily.java b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/MurmurHash3BinaryHashFunctionFamily.java
index 8f2e32c..588ad00 100644
--- a/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/MurmurHash3BinaryHashFunctionFamily.java
+++ b/hyracks/hyracks-data/hyracks-data-std/src/main/java/edu/uci/ics/hyracks/data/std/accessors/MurmurHash3BinaryHashFunctionFamily.java
@@ -3,9 +3,23 @@
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFamily;
-public class MurmurHash3BinaryHashFunctionFamily implements IBinaryHashFunctionFamily {
+/**
+ * An implementation of the Murmur3 hash family. The code is based on the
+ * original
+ * <a href="http://code.google.com/p/guava-libraries/source/browse/guava/src/com/google/common/hash/Murmur3_32HashFunction.java">
+ * Guava implementation</a>
+ * from the Google Guava library.
+ */
+public class MurmurHash3BinaryHashFunctionFamily implements
+ IBinaryHashFunctionFamily {
+
+ public static final IBinaryHashFunctionFamily INSTANCE = new MurmurHash3BinaryHashFunctionFamily();
+
private static final long serialVersionUID = 1L;
+ private MurmurHash3BinaryHashFunctionFamily() {
+ }
+
private static final int C1 = 0xcc9e2d51;
private static final int C2 = 0x1b873593;
private static final int C3 = 5;
@@ -21,9 +35,10 @@
int h = seed;
int p = offset;
int remain = length;
- while (remain > 4) {
- int k = ((int) bytes[p]) | (((int) bytes[p + 1]) << 8) | (((int) bytes[p + 2]) << 16)
- | (((int) bytes[p + 3]) << 24);
+ while (remain >= 4) {
+ int k = (bytes[p] & 0xff) | ((bytes[p + 1] & 0xff) << 8)
+ | ((bytes[p + 2] & 0xff) << 16)
+ | ((bytes[p + 3] & 0xff) << 24);
k *= C1;
k = Integer.rotateLeft(k, 15);
k *= C2;
@@ -33,20 +48,16 @@
p += 4;
remain -= 4;
}
- int k = 0;
- switch (remain) {
- case 3:
- k = bytes[p++];
- case 2:
- k = (k << 8) | bytes[p++];
- case 1:
- k = (k << 8) | bytes[p++];
- k *= C1;
- k = Integer.rotateLeft(k, 15);
- k *= C2;
- h ^= k;
- h = Integer.rotateLeft(h, 13);
- h = h * C3 + C4;
+ if (remain > 0) {
+ int k = 0;
+ for (int i = 0; remain > 0; i += 8) {
+ k ^= (bytes[p++] & 0xff) << i;
+ remain--;
+ }
+ k *= C1;
+ k = Integer.rotateLeft(k, 15);
+ k *= C2;
+ h ^= k;
}
h ^= length;
h ^= (h >>> 16);
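
To make the two fixes above concrete, here is a self-contained sketch of the Murmur3-32 core: remain >= 4 keeps exact 4-byte tails in the block loop, and masking each byte with 0xff prevents Java's sign extension from corrupting blocks containing bytes >= 0x80. C4 and the final fmix constants are the standard Murmur3 values and are assumptions here, since the hunk truncates before they appear:

public final class Murmur3Sketch {
    private static final int C1 = 0xcc9e2d51;
    private static final int C2 = 0x1b873593;
    private static final int C3 = 5;
    private static final int C4 = 0xe6546b64; // assumed standard constant

    public static int hash(byte[] bytes, int offset, int length, int seed) {
        int h = seed;
        int p = offset;
        int remain = length;
        while (remain >= 4) {
            // Mask each byte before shifting to avoid sign extension.
            int k = (bytes[p] & 0xff) | ((bytes[p + 1] & 0xff) << 8)
                    | ((bytes[p + 2] & 0xff) << 16) | ((bytes[p + 3] & 0xff) << 24);
            k *= C1;
            k = Integer.rotateLeft(k, 15);
            k *= C2;
            h ^= k;
            h = Integer.rotateLeft(h, 13);
            h = h * C3 + C4;
            p += 4;
            remain -= 4;
        }
        if (remain > 0) {
            // Tail: fold the 1-3 leftover bytes in little-endian order.
            int k = 0;
            for (int i = 0; remain > 0; i += 8) {
                k ^= (bytes[p++] & 0xff) << i;
                remain--;
            }
            k *= C1;
            k = Integer.rotateLeft(k, 15);
            k *= C2;
            h ^= k;
        }
        h ^= length;
        h ^= (h >>> 16);
        h *= 0x85ebca6b; // assumed standard fmix32 constants
        h ^= (h >>> 13);
        h *= 0xc2b2ae35;
        h ^= (h >>> 16);
        return h;
    }
}
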
diff --git a/hyracks/hyracks-data/pom.xml b/hyracks/hyracks-data/pom.xml
index 2af86c9..0df5325 100644
--- a/hyracks/hyracks-data/pom.xml
+++ b/hyracks/hyracks-data/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<modules>
diff --git a/hyracks/hyracks-dataflow-common/pom.xml b/hyracks/hyracks-dataflow-common/pom.xml
index 4afb088..a87336d 100644
--- a/hyracks/hyracks-dataflow-common/pom.xml
+++ b/hyracks/hyracks-dataflow-common/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -26,14 +26,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameOutputStream.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameOutputStream.java
index 07f6ba2..c403501 100644
--- a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameOutputStream.java
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/FrameOutputStream.java
@@ -32,6 +32,9 @@
}
public void reset(ByteBuffer buffer, boolean clear) {
+ if (clear) {
+ buffer.clear();
+ }
frameTupleAppender.reset(buffer, clear);
}
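
A quick illustration of what the added buffer.clear() buys; this is plain java.nio semantics, independent of Hyracks: clear() rewinds position to 0 and restores limit to capacity without erasing contents, so a recycled frame is appended from the start rather than from wherever the previous writer stopped.

import java.nio.ByteBuffer;

public class ClearSketch {
    public static void main(String[] args) {
        ByteBuffer frame = ByteBuffer.allocate(8);
        frame.putInt(42);                      // position is now 4
        frame.clear();                         // position = 0, limit = 8; bytes untouched
        System.out.println(frame.position());  // prints 0
        System.out.println(frame.getInt());    // prints 42: contents survive clear()
    }
}
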
diff --git a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/SerializingDataWriter.java b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/SerializingDataWriter.java
index cbe2ae6..bebef21 100644
--- a/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/SerializingDataWriter.java
+++ b/hyracks/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/SerializingDataWriter.java
@@ -88,7 +88,7 @@
flushFrame();
tupleAppender.reset(buffer, true);
if (!tupleAppender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size (" + buffer.capacity() + ")");
}
}
}
@@ -103,4 +103,4 @@
public void fail() throws HyracksDataException {
frameWriter.fail();
}
-}
\ No newline at end of file
+}
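
The same "record larger than frame" message is now constructed by hand at many call sites in this patch. A hypothetical helper that would centralize the pattern; the class name and placement are assumptions, not part of the patch:

import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;

public final class FrameErrors {
    private FrameErrors() {
    }

    // Builds the exception thrown when a record cannot fit in a frame; sizes in bytes.
    public static HyracksDataException recordTooLarge(int recordSize, int frameSize) {
        return new HyracksDataException("Record size (" + recordSize
                + ") larger than frame size (" + frameSize + ")");
    }
}
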
diff --git a/hyracks/hyracks-dataflow-hadoop/pom.xml b/hyracks/hyracks-dataflow-hadoop/pom.xml
index 3947387..657f6db 100644
--- a/hyracks/hyracks-dataflow-hadoop/pom.xml
+++ b/hyracks/hyracks-dataflow-hadoop/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -26,14 +26,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
@@ -53,7 +53,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
</dependencies>
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java
index 2841509..5e5cae1 100644
--- a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/HadoopReadOperatorDescriptor.java
@@ -207,7 +207,7 @@
FrameUtils.flushFrame(outBuffer, writer);
appender.reset(outBuffer, true);
if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size (" + outBuffer.capacity() + ")");
}
}
}
@@ -235,4 +235,4 @@
}
};
}
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MapperOperatorDescriptor.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MapperOperatorDescriptor.java
index 4b1144a..e498975 100644
--- a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MapperOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/MapperOperatorDescriptor.java
@@ -114,7 +114,7 @@
runGen.nextFrame(frame);
fta.reset(frame, true);
if (!fta.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size (" + frame.capacity() + ")");
}
}
}
@@ -268,4 +268,4 @@
}
};
}
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReduceWriter.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReduceWriter.java
index 33d58af..1a6f6fd 100644
--- a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReduceWriter.java
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ReduceWriter.java
@@ -114,7 +114,7 @@
FrameUtils.copy(buffer, copyFrame);
}
- private void accumulate(FrameTupleAccessor accessor, int tIndex) {
+ private void accumulate(FrameTupleAccessor accessor, int tIndex) throws HyracksDataException {
if (!fta.append(accessor, tIndex)) {
++bPtr;
if (group.size() <= bPtr) {
@@ -122,7 +122,9 @@
}
fta.reset(group.get(bPtr), true);
if (!fta.append(accessor, tIndex)) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size ("
+ + (accessor.getTupleEndOffset(tIndex) - accessor.getTupleStartOffset(tIndex))
+ + ") larger than frame size (" + group.get(bPtr).capacity() + ")");
}
}
}
@@ -184,4 +186,4 @@
@Override
public void fail() throws HyracksDataException {
}
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ShuffleFrameReader.java b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ShuffleFrameReader.java
index 8e03eae..253dcbf 100644
--- a/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ShuffleFrameReader.java
+++ b/hyracks/hyracks-dataflow-hadoop/src/main/java/edu/uci/ics/hyracks/dataflow/hadoop/mapreduce/ShuffleFrameReader.java
@@ -154,7 +154,9 @@
if (!fta.append(accessor, tIdx)) {
flush();
if (!fta.append(accessor, tIdx)) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size ("
+ + (accessor.getTupleEndOffset(tIdx) - accessor.getTupleStartOffset(tIdx))
+ + ") larger than frame size (" + fta.getBuffer().capacity() + ")");
}
}
}
@@ -175,4 +177,4 @@
fta.reset(buffer, true);
}
}
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-dataflow-std/pom.xml b/hyracks/hyracks-dataflow-std/pom.xml
index db4f452..8e67891 100644
--- a/hyracks/hyracks-dataflow-std/pom.xml
+++ b/hyracks/hyracks-dataflow-std/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,14 +27,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
@@ -45,5 +45,10 @@
<type>jar</type>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>2.4</version>
+ </dependency>
</dependencies>
</project>
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/LocalityAwarePartitionDataWriter.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/LocalityAwarePartitionDataWriter.java
index 6ec9013..bea94ff 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/LocalityAwarePartitionDataWriter.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/LocalityAwarePartitionDataWriter.java
@@ -83,7 +83,7 @@
flushFrame(appenderBuffer, pWriters[h]);
appender.reset(appenderBuffer, true);
if (!appender.append(tupleAccessor, i)) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + (tupleAccessor.getTupleEndOffset(i) - tupleAccessor.getTupleStartOffset(i)) + ") larger than frame size (" + appender.getBuffer().capacity() + ")");
}
}
}
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/PartitionDataWriter.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/PartitionDataWriter.java
index 6b3f3c0..820e2ce 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/PartitionDataWriter.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/connectors/PartitionDataWriter.java
@@ -87,7 +87,7 @@
flushFrame(appenderBuffer, pWriters[h]);
appender.reset(appenderBuffer, true);
if (!appender.append(tupleAccessor, i)) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + (tupleAccessor.getTupleEndOffset(i) - tupleAccessor.getTupleStartOffset(i)) + ") larger than frame size (" + appender.getBuffer().capacity() + ")");
}
}
}
@@ -99,4 +99,4 @@
pWriters[i].fail();
}
}
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
index 7cf437d..9c1ea9d 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
@@ -71,7 +71,7 @@
FrameUtils.flushFrame(frame, writer);
appender.reset(frame, true);
if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size (" + appender.getBuffer().capacity() + ")");
}
}
}
@@ -249,4 +249,4 @@
return true;
}
}
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
new file mode 100644
index 0000000..bf7ff33
--- /dev/null
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileRemoveOperatorDescriptor.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.dataflow.std.file;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.commons.io.FileUtils;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+
+public class FileRemoveOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+ private final IFileSplitProvider fileSplitProvider;
+
+ public FileRemoveOperatorDescriptor(IOperatorDescriptorRegistry spec, IFileSplitProvider fileSplitProvider) {
+ super(spec, 0, 0);
+ this.fileSplitProvider = fileSplitProvider;
+ }
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+ final FileSplit split = fileSplitProvider.getFileSplits()[partition];
+ return new AbstractOperatorNodePushable() {
+
+ @Override
+ public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public void initialize() throws HyracksDataException {
+ File f = split.getLocalFile().getFile();
+ try {
+ FileUtils.deleteDirectory(f);
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public IFrameWriter getInputFrameWriter(int index) {
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public int getInputArity() {
+ return 0;
+ }
+
+ @Override
+ public void deinitialize() throws HyracksDataException {
+ }
+ };
+ }
+
+}
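
A hedged usage sketch for the new operator (not part of the patch): wiring a FileRemoveOperatorDescriptor into a job. ConstantFileSplitProvider and PartitionConstraintHelper are existing Hyracks classes; the node name "nc1" and the path are illustrative only.

import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.file.FileRemoveOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;

public class FileRemoveExample {
    public static JobSpecification buildJob() {
        JobSpecification spec = new JobSpecification();
        FileSplit[] splits = new FileSplit[] { new FileSplit("nc1", "/tmp/teststore") };
        IFileSplitProvider provider = new ConstantFileSplitProvider(splits);
        // The operator has no inputs or outputs, so it can be a job root.
        FileRemoveOperatorDescriptor remove = new FileRemoveOperatorDescriptor(spec, provider);
        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, remove, "nc1");
        spec.addRoot(remove);
        return spec;
    }
}
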
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileSplit.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileSplit.java
index b74d97b..35649ca 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileSplit.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/file/FileSplit.java
@@ -23,17 +23,25 @@
private static final long serialVersionUID = 1L;
private final String nodeName;
-
private final FileReference file;
+ private final int ioDeviceId;
public FileSplit(String nodeName, FileReference file) {
this.nodeName = nodeName;
this.file = file;
+ this.ioDeviceId = 0;
+ }
+
+ public FileSplit(String nodeName, FileReference file, int ioDeviceId) {
+ this.nodeName = nodeName;
+ this.file = file;
+ this.ioDeviceId = ioDeviceId;
}
public FileSplit(String nodeName, String path) {
this.nodeName = nodeName;
this.file = new FileReference(new File(path));
+ this.ioDeviceId = 0;
}
public String getNodeName() {
@@ -43,4 +51,8 @@
public FileReference getLocalFile() {
return file;
}
+
+ public int getIODeviceId() {
+ return ioDeviceId;
+ }
}
\ No newline at end of file
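
A short sketch of the new three-argument constructor, which lets a split pin its file to a specific I/O device on a node. The paths, node name, and device ids below are illustrative only.

import java.io.File;

import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;

public class FileSplitSketch {
    public static FileSplit[] splitsAcrossDevices() {
        // Two partitions on the same node, each mapped to a different io device.
        return new FileSplit[] {
                new FileSplit("nc1", new FileReference(new File("/disk0/part0")), 0),
                new FileSplit("nc1", new FileReference(new File("/disk1/part1")), 1) };
    }
}
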
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java
index e1fda74..ebf3848 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/InMemoryHashJoin.java
@@ -103,32 +103,34 @@
accessorProbe.reset(buffer);
int tupleCount0 = accessorProbe.getTupleCount();
for (int i = 0; i < tupleCount0; ++i) {
- int entry = tpcProbe.partition(accessorProbe, i, tableSize);
- boolean matchFound = false;
- int offset = 0;
- do {
- table.getTuplePointer(entry, offset++, storedTuplePointer);
- if (storedTuplePointer.frameIndex < 0)
- break;
- int bIndex = storedTuplePointer.frameIndex;
- int tIndex = storedTuplePointer.tupleIndex;
- accessorBuild.reset(buffers.get(bIndex));
- int c = tpComparator.compare(accessorProbe, i, accessorBuild, tIndex);
- if (c == 0) {
- matchFound = true;
- appendToResult(i, tIndex, writer);
- }
- } while (true);
-
+ boolean matchFound = false;
+ if (tableSize != 0) {
+ int entry = tpcProbe.partition(accessorProbe, i, tableSize);
+ int offset = 0;
+ do {
+ table.getTuplePointer(entry, offset++, storedTuplePointer);
+ if (storedTuplePointer.frameIndex < 0)
+ break;
+ int bIndex = storedTuplePointer.frameIndex;
+ int tIndex = storedTuplePointer.tupleIndex;
+ accessorBuild.reset(buffers.get(bIndex));
+ int c = tpComparator.compare(accessorProbe, i, accessorBuild, tIndex);
+ if (c == 0) {
+ matchFound = true;
+ appendToResult(i, tIndex, writer);
+ }
+ } while (true);
+ }
+
if (!matchFound && isLeftOuter) {
-
if (!appender.appendConcat(accessorProbe, i, nullTupleBuild.getFieldEndOffsets(),
nullTupleBuild.getByteArray(), 0, nullTupleBuild.getSize())) {
flushFrame(outBuffer, writer);
appender.reset(outBuffer, true);
if (!appender.appendConcat(accessorProbe, i, nullTupleBuild.getFieldEndOffsets(),
nullTupleBuild.getByteArray(), 0, nullTupleBuild.getSize())) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size larger than frame size ("
+ + appender.getBuffer().capacity() + ")");
}
}
@@ -156,7 +158,12 @@
flushFrame(outBuffer, writer);
appender.reset(outBuffer, true);
if (!appender.appendConcat(accessorProbe, probeSidetIx, accessorBuild, buildSidetIx)) {
- throw new IllegalStateException();
+ int tSize = accessorProbe.getTupleEndOffset(probeSidetIx)
+ - accessorProbe.getTupleStartOffset(probeSidetIx)
+ + accessorBuild.getTupleEndOffset(buildSidetIx)
+ - accessorBuild.getTupleStartOffset(buildSidetIx);
+ throw new HyracksDataException("Record size (" + tSize + ") larger than frame size ("
+ + appender.getBuffer().capacity() + ")");
}
}
} else {
@@ -164,9 +171,14 @@
flushFrame(outBuffer, writer);
appender.reset(outBuffer, true);
if (!appender.appendConcat(accessorBuild, buildSidetIx, accessorProbe, probeSidetIx)) {
- throw new IllegalStateException();
+ int tSize = accessorProbe.getTupleEndOffset(probeSidetIx)
+ - accessorProbe.getTupleStartOffset(probeSidetIx)
+ + accessorBuild.getTupleEndOffset(buildSidetIx)
+ - accessorBuild.getTupleStartOffset(buildSidetIx);
+ throw new HyracksDataException("Record size (" + tSize + ") larger than frame size ("
+ + appender.getBuffer().capacity() + ")");
}
}
}
}
-}
\ No newline at end of file
+}
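
Why the new tableSize != 0 guard is needed (hedged reasoning): a typical tuple partition computer reduces the hash to a bucket via modulo, so partitioning against an empty table would divide by zero. With the guard, an empty build side skips probing entirely and, for left outer joins, falls through to the null-padding branch below it.

public class PartitionSketch {
    // Illustrative only: the modulo step found in typical partition computers.
    public static int bucket(int hash, int tableSize) {
        return (hash & Integer.MAX_VALUE) % tableSize; // ArithmeticException when tableSize == 0
    }

    public static void main(String[] args) {
        System.out.println(bucket("key".hashCode(), 8)); // fine
        System.out.println(bucket("key".hashCode(), 0)); // throws: / by zero
    }
}
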
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java
index 6870e71..546e80f 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/NestedLoopJoin.java
@@ -136,7 +136,10 @@
flushFrame(outBuffer, writer);
appender.reset(outBuffer, true);
if (!appender.appendConcat(accessorOuter, i, accessorInner, j)) {
- throw new IllegalStateException();
+ int tSize = accessorOuter.getTupleEndOffset(i) - accessorOuter.getTupleStartOffset(i)
+ + accessorInner.getTupleEndOffset(j) - accessorInner.getTupleStartOffset(j);
+ throw new HyracksDataException("Record size (" + tSize + ") larger than frame size ("
+ + appender.getBuffer().capacity() + ")");
}
}
}
@@ -149,7 +152,10 @@
appender.reset(outBuffer, true);
if (!appender.appendConcat(accessorOuter, i, nullTupleBuilder.getFieldEndOffsets(),
nullTupleBuilder.getByteArray(), 0, nullTupleBuilder.getSize())) {
- throw new IllegalStateException();
+ int tSize = accessorOuter.getTupleEndOffset(i) - accessorOuter.getTupleStartOffset(i)
+ + nullTupleBuilder.getSize();
+ throw new HyracksDataException("Record size (" + tSize + ") larger than frame size ("
+ + appender.getBuffer().capacity() + ")");
}
}
}
@@ -196,4 +202,4 @@
}
return 0;
}
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
index cf39416..7067080 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
@@ -31,7 +31,6 @@
import edu.uci.ics.hyracks.api.dataflow.value.INullWriter;
import edu.uci.ics.hyracks.api.dataflow.value.INullWriterFactory;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparator;
import edu.uci.ics.hyracks.api.dataflow.value.ITuplePairComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputer;
@@ -372,9 +371,7 @@
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- if(!state.hybridHJ.isTableEmpty()){
- state.hybridHJ.probe(buffer, writer);
- }
+ state.hybridHJ.probe(buffer, writer);
}
@Override
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortRunMerger.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortRunMerger.java
index b51132e..39cfb88 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortRunMerger.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/ExternalSortRunMerger.java
@@ -194,7 +194,9 @@
outFrameAppender.reset(outFrame, true);
for (int i = 0; i < copyCount; i++) {
if (!outFrameAppender.append(fta, i)) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size ("
+ + (fta.getTupleEndOffset(i) - fta.getTupleStartOffset(i))
+ + ") larger than frame size (" + outFrameAppender.getBuffer().capacity() + ")");
}
totalCount++;
}
diff --git a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/FrameSorter.java b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/FrameSorter.java
index 680b98e..ff02d57 100644
--- a/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/FrameSorter.java
+++ b/hyracks/hyracks-dataflow-std/src/main/java/edu/uci/ics/hyracks/dataflow/std/sort/FrameSorter.java
@@ -132,7 +132,8 @@
FrameUtils.flushFrame(outFrame, writer);
appender.reset(outFrame, true);
if (!appender.append(fta1, tStart, tEnd)) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + (tEnd - tStart) + ") larger than frame size ("
+ + appender.getBuffer().capacity() + ")");
}
}
}
@@ -242,4 +243,4 @@
public void close() {
this.buffers.clear();
}
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-dist/pom.xml b/hyracks/hyracks-dist/pom.xml
index d600fd8..2dcd0ed 100755
--- a/hyracks/hyracks-dist/pom.xml
+++ b/hyracks/hyracks-dist/pom.xml
@@ -4,7 +4,7 @@
<parent>
<artifactId>hyracks</artifactId>
<groupId>edu.uci.ics.hyracks</groupId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<artifactId>hyracks-dist</artifactId>
diff --git a/hyracks/hyracks-documentation/pom.xml b/hyracks/hyracks-documentation/pom.xml
index 6473cc8..958bd71 100644
--- a/hyracks/hyracks-documentation/pom.xml
+++ b/hyracks/hyracks-documentation/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml b/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
index f7698be..b2c1779 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
@@ -6,26 +6,26 @@
<parent>
<groupId>edu.uci.ics.hyracks.examples</groupId>
<artifactId>btree-example</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks.examples.btree</groupId>
<artifactId>btreehelper</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml b/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
index 637612d..b394a88 100644
--- a/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
@@ -6,32 +6,32 @@
<parent>
<groupId>edu.uci.ics.hyracks.examples</groupId>
<artifactId>btree-example</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
<build>
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
index f342d23..b9a27ec 100644
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
@@ -90,7 +90,7 @@
FrameUtils.flushFrame(outputFrame, writer);
appender.reset(outputFrame, true);
if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size (" + appender.getBuffer().capacity() + ")");
}
}
}
diff --git a/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml b/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml
index ead4e89..c70578e 100644
--- a/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml
+++ b/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml
@@ -2,13 +2,13 @@
<modelVersion>4.0.0</modelVersion>
<groupId>edu.uci.ics.hyracks.examples.btree</groupId>
<artifactId>btreeserver</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<name>btreeserver</name>
<parent>
<groupId>edu.uci.ics.hyracks.examples</groupId>
<artifactId>btree-example</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -63,20 +63,20 @@
<dependency>
<groupId>edu.uci.ics.hyracks.examples.btree</groupId>
<artifactId>btreehelper</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-examples/btree-example/pom.xml b/hyracks/hyracks-examples/btree-example/pom.xml
index ce0a486..7332d0e 100644
--- a/hyracks/hyracks-examples/btree-example/pom.xml
+++ b/hyracks/hyracks-examples/btree-example/pom.xml
@@ -8,7 +8,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-examples</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<modules>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml
index e618295..acf2dbf 100644
--- a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml
+++ b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatclient/pom.xml
@@ -7,20 +7,20 @@
<parent>
<groupId>edu.uci.ics.hyracks.examples</groupId>
<artifactId>hadoop-compat-example</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks.examples.compat</groupId>
<artifactId>hadoopcompathelper</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml
index 5f7f072..94f43df 100644
--- a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml
+++ b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompathelper/pom.xml
@@ -7,20 +7,20 @@
<parent>
<groupId>edu.uci.ics.hyracks.examples</groupId>
<artifactId>hadoop-compat-example</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
</dependencies>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/pom.xml
index d087992..9a0f7cb 100644
--- a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/pom.xml
+++ b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/pom.xml
@@ -2,13 +2,13 @@
<modelVersion>4.0.0</modelVersion>
<groupId>edu.uci.ics.hyracks.examples.compat</groupId>
<artifactId>hadoopcompatserver</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<name>hadoopcompatserver</name>
<parent>
<groupId>edu.uci.ics.hyracks.examples</groupId>
<artifactId>hadoop-compat-example</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -60,7 +60,7 @@
<plugin>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-virtualcluster-maven-plugin</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<configuration>
<hyracksServerHome>${basedir}/target/hadoopcompatserver-${project.version}-binary-assembly</hyracksServerHome>
<jvmOptions>${jvm.extraargs}</jvmOptions>
@@ -135,27 +135,27 @@
<dependency>
<groupId>edu.uci.ics.hyracks.examples.compat</groupId>
<artifactId>hadoopcompathelper</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks.examples.compat</groupId>
<artifactId>hadoopcompatclient</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>test</scope>
</dependency>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
index 980e800..e927a1c 100644
--- a/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
+++ b/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
@@ -8,7 +8,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-examples</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<modules>
@@ -29,7 +29,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-hadoop-compat</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml b/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
index 7c38b13..08ff5e2 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-examples</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
<plugins>
@@ -32,75 +32,75 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-rtree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-rtree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-test-support</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-client</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/TreeOperatorTestHelper.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/TreeOperatorTestHelper.java
index 935724b..932e166 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/TreeOperatorTestHelper.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/TreeOperatorTestHelper.java
@@ -25,6 +25,7 @@
protected final String sep = System.getProperty("file.separator");
protected static int DEFAULT_MEM_PAGE_SIZE = 32768;
protected static int DEFAULT_MEM_NUM_PAGES = 1000;
+ protected static double DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE = 0.01;
public String getPrimaryIndexName() {
return System.getProperty("java.io.tmpdir") + sep + "primary" + simpleDateFormat.format(new Date());
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/PartitionedWordInvertedIndexTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/PartitionedWordInvertedIndexTest.java
index 62d4362..dfd5495 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/PartitionedWordInvertedIndexTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/PartitionedWordInvertedIndexTest.java
@@ -54,7 +54,7 @@
invertedIndexDataflowHelperFactory = new PartitionedLSMInvertedIndexDataflowHelperFactory(
new ConstantMergePolicyProvider(MERGE_THRESHOLD), ThreadCountingOperationTrackerFactory.INSTANCE,
SynchronousSchedulerProvider.INSTANCE, NoOpIOOperationCallback.INSTANCE, DEFAULT_MEM_PAGE_SIZE,
- DEFAULT_MEM_NUM_PAGES);
+ DEFAULT_MEM_NUM_PAGES, DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE);
}
@Override
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/WordInvertedIndexTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/WordInvertedIndexTest.java
index c35c6c9..9e5a47b 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/WordInvertedIndexTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/WordInvertedIndexTest.java
@@ -48,7 +48,8 @@
invertedIndexDataflowHelperFactory = new LSMInvertedIndexDataflowHelperFactory(new ConstantMergePolicyProvider(
MERGE_THRESHOLD), ThreadCountingOperationTrackerFactory.INSTANCE,
- SynchronousSchedulerProvider.INSTANCE, NoOpIOOperationCallback.INSTANCE, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES);
+ SynchronousSchedulerProvider.INSTANCE, NoOpIOOperationCallback.INSTANCE, DEFAULT_MEM_PAGE_SIZE,
+ DEFAULT_MEM_NUM_PAGES, DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE);
}
@Override
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeOperatorTestHelper.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeOperatorTestHelper.java
index 912ab0e..c67cbfd 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeOperatorTestHelper.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeOperatorTestHelper.java
@@ -35,7 +35,8 @@
public IIndexDataflowHelperFactory createDataFlowHelperFactory() {
return new LSMBTreeDataflowHelperFactory(new ConstantMergePolicyProvider(MERGE_THRESHOLD),
ThreadCountingOperationTrackerFactory.INSTANCE, SynchronousSchedulerProvider.INSTANCE,
- NoOpIOOperationCallback.INSTANCE, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES);
+ NoOpIOOperationCallback.INSTANCE, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES,
+ DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE);
}
}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeOperatorTestHelper.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeOperatorTestHelper.java
index 84b34b7..a892b4e 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeOperatorTestHelper.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeOperatorTestHelper.java
@@ -42,6 +42,6 @@
return new LSMRTreeDataflowHelperFactory(valueProviderFactories, rtreePolicyType, btreeComparatorFactories,
new ConstantMergePolicyProvider(MERGE_THRESHOLD), ThreadCountingOperationTrackerFactory.INSTANCE,
SynchronousSchedulerProvider.INSTANCE, NoOpIOOperationCallback.INSTANCE, linearizerCmpFactory,
- DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES);
+ DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES, DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE);
}
}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
index e74e54c..89679ae 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
@@ -60,9 +60,10 @@
private static IHyracksClientConnection hcc;
private final List<File> outputFiles;
-
+
protected static int DEFAULT_MEM_PAGE_SIZE = 32768;
protected static int DEFAULT_MEM_NUM_PAGES = 1000;
+ protected static double DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE = 0.01;
@Rule
public TemporaryFolder outputFolder = new TemporaryFolder();
diff --git a/hyracks/hyracks-examples/pom.xml b/hyracks/hyracks-examples/pom.xml
index a47cb9c..6159b49 100644
--- a/hyracks/hyracks-examples/pom.xml
+++ b/hyracks/hyracks-examples/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<modules>
diff --git a/hyracks/hyracks-examples/text-example/pom.xml b/hyracks/hyracks-examples/text-example/pom.xml
index ee75f31..54aa3be 100644
--- a/hyracks/hyracks-examples/text-example/pom.xml
+++ b/hyracks/hyracks-examples/text-example/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-examples</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<modules>
diff --git a/hyracks/hyracks-examples/text-example/textclient/pom.xml b/hyracks/hyracks-examples/text-example/textclient/pom.xml
index 7bf1c44..b2b7dbb 100644
--- a/hyracks/hyracks-examples/text-example/textclient/pom.xml
+++ b/hyracks/hyracks-examples/text-example/textclient/pom.xml
@@ -6,20 +6,20 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>text-example</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>texthelper</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-examples/text-example/texthelper/pom.xml b/hyracks/hyracks-examples/text-example/texthelper/pom.xml
index c948242..c20ba88 100644
--- a/hyracks/hyracks-examples/text-example/texthelper/pom.xml
+++ b/hyracks/hyracks-examples/text-example/texthelper/pom.xml
@@ -6,26 +6,26 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>text-example</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
<build>
diff --git a/hyracks/hyracks-examples/text-example/texthelper/src/main/java/edu/uci/ics/hyracks/examples/text/WordTupleParserFactory.java b/hyracks/hyracks-examples/text-example/texthelper/src/main/java/edu/uci/ics/hyracks/examples/text/WordTupleParserFactory.java
index 249e3bb..cda1380 100644
--- a/hyracks/hyracks-examples/text-example/texthelper/src/main/java/edu/uci/ics/hyracks/examples/text/WordTupleParserFactory.java
+++ b/hyracks/hyracks-examples/text-example/texthelper/src/main/java/edu/uci/ics/hyracks/examples/text/WordTupleParserFactory.java
@@ -44,7 +44,7 @@
FrameUtils.flushFrame(frame, writer);
appender.reset(frame, true);
if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size (" + appender.getBuffer().capacity() + ")");
}
}
}
@@ -167,4 +167,4 @@
return true;
}
}
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-examples/text-example/textserver/pom.xml b/hyracks/hyracks-examples/text-example/textserver/pom.xml
index bcd43a4..1daca96 100644
--- a/hyracks/hyracks-examples/text-example/textserver/pom.xml
+++ b/hyracks/hyracks-examples/text-example/textserver/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>text-example</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -59,7 +59,7 @@
<plugin>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-virtualcluster-maven-plugin</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<configuration>
<hyracksServerHome>${basedir}/target/textserver-${project.version}-binary-assembly</hyracksServerHome>
<jvmOptions>${jvm.extraargs}</jvmOptions>
@@ -134,27 +134,27 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>texthelper</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>textclient</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>test</scope>
</dependency>
diff --git a/hyracks/hyracks-examples/tpch-example/pom.xml b/hyracks/hyracks-examples/tpch-example/pom.xml
index f8acd8d..68577a0 100644
--- a/hyracks/hyracks-examples/tpch-example/pom.xml
+++ b/hyracks/hyracks-examples/tpch-example/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-examples</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<modules>
diff --git a/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml b/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
index 9f08f0c..127d0f1 100644
--- a/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
+++ b/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
@@ -5,20 +5,20 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>tpch-example</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
<build>
diff --git a/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml b/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml
index 6b97078..fb45c3b 100644
--- a/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml
+++ b/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>tpch-example</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -62,25 +62,25 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-hadoop-compat/pom.xml b/hyracks/hyracks-hadoop-compat/pom.xml
index 88082af..f9b1b3a 100644
--- a/hyracks/hyracks-hadoop-compat/pom.xml
+++ b/hyracks/hyracks-hadoop-compat/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -80,7 +80,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-hadoop</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml
index dd26f1e..39f064f 100644
--- a/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.20.2/pom.xml
@@ -6,7 +6,7 @@
<parent>
<artifactId>hyracks-hdfs</artifactId>
<groupId>edu.uci.ics.hyracks</groupId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -98,7 +98,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml
index d0f11eb..fd8b1c5 100644
--- a/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-0.23.1/pom.xml
@@ -6,7 +6,7 @@
<parent>
<artifactId>hyracks-hdfs</artifactId>
<groupId>edu.uci.ics.hyracks</groupId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
<plugins>
@@ -202,7 +202,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml b/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
index 436e09f..87875f9 100644
--- a/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/pom.xml
@@ -6,7 +6,7 @@
<parent>
<artifactId>hyracks-hdfs</artifactId>
<groupId>edu.uci.ics.hyracks</groupId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -178,31 +178,31 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>
diff --git a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
index 9e9abdf..8b58ecd 100644
--- a/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
+++ b/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/edu/uci/ics/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
@@ -114,6 +114,7 @@
try {
Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
Job job = confFactory.getConf();
+ job.getConfiguration().setClassLoader(ctx.getJobletContext().getClassLoader());
IKeyValueParser parser = tupleParserFactory.createKeyValueParser(ctx);
writer.open();
InputFormat inputFormat = ReflectionUtils.newInstance(job.getInputFormatClass(),
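Note: the one-line addition above matters because Hadoop's Configuration resolves class names against its own classloader; without it, an InputFormat class shipped with the Hyracks job (rather than sitting on the node controller's system classpath) cannot be found when ReflectionUtils instantiates it. A minimal sketch of the pattern follows; it is a hypothetical illustration, not the patch's exact code:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.InputFormat;
    import org.apache.hadoop.util.ReflectionUtils;

    public class JobClassLoaderSketch {
        // Make job-supplied classes visible to the Configuration before the reflective lookup.
        public static InputFormat<?, ?> loadInputFormat(Configuration conf, ClassLoader taskClassLoader,
                String className) throws ClassNotFoundException {
            conf.setClassLoader(taskClassLoader); // as in the patch: the joblet's classloader
            Class<?> clazz = conf.getClassByName(className); // now searched in the job's jars
            return (InputFormat<?, ?>) ReflectionUtils.newInstance(clazz, conf);
        }
    }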
diff --git a/hyracks/hyracks-hdfs/pom.xml b/hyracks/hyracks-hdfs/pom.xml
index e3ada06..b8cb5fe 100644
--- a/hyracks/hyracks-hdfs/pom.xml
+++ b/hyracks/hyracks-hdfs/pom.xml
@@ -8,7 +8,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<modules>
diff --git a/hyracks/hyracks-ipc/pom.xml b/hyracks/hyracks-ipc/pom.xml
index 1368e20..d76aa56 100644
--- a/hyracks/hyracks-ipc/pom.xml
+++ b/hyracks/hyracks-ipc/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
diff --git a/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
index c0a9c45..3a84622 100644
--- a/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
+++ b/hyracks/hyracks-maven-plugins/hyracks-virtualcluster-maven-plugin/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-maven-plugins</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
diff --git a/hyracks/hyracks-maven-plugins/pom.xml b/hyracks/hyracks-maven-plugins/pom.xml
index 08bbb2c..39338e7 100644
--- a/hyracks/hyracks-maven-plugins/pom.xml
+++ b/hyracks/hyracks-maven-plugins/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<dependencies>
diff --git a/hyracks/hyracks-net/pom.xml b/hyracks/hyracks-net/pom.xml
index 9dde9a1..b82a88a 100644
--- a/hyracks/hyracks-net/pom.xml
+++ b/hyracks/hyracks-net/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
diff --git a/hyracks/hyracks-server/pom.xml b/hyracks/hyracks-server/pom.xml
index e32b658..bdf8db6 100644
--- a/hyracks/hyracks-server/pom.xml
+++ b/hyracks/hyracks-server/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -74,14 +74,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-storage-am-bloomfilter/pom.xml b/hyracks/hyracks-storage-am-bloomfilter/pom.xml
index 76c64d8..3c3f14b 100644
--- a/hyracks/hyracks-storage-am-bloomfilter/pom.xml
+++ b/hyracks/hyracks-storage-am-bloomfilter/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -25,7 +25,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-storage-am-btree/pom.xml b/hyracks/hyracks-storage-am-btree/pom.xml
index 201d597..a337609 100644
--- a/hyracks/hyracks-storage-am-btree/pom.xml
+++ b/hyracks/hyracks-storage-am-btree/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,28 +27,28 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorNodePushable.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorNodePushable.java
index 648e523..35ec2f6 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeUpdateSearchOperatorNodePushable.java
@@ -17,6 +17,7 @@
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
@@ -67,7 +68,8 @@
FrameUtils.flushFrame(writeBuffer, writer);
appender.reset(writeBuffer, true);
if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size ("
+ + appender.getBuffer().capacity() + ")");
}
}
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
index 354aa1e..5027dc7 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
@@ -82,6 +82,16 @@
}
@Override
+ public int getMaxTupleSize(int pageSize) {
+ return (pageSize - getPageHeaderSize()) / 2;
+ }
+
+ @Override
+ public int getBytesRequiredToWriteTuple(ITupleReference tuple) {
+ return tupleWriter.bytesRequired(tuple) + slotManager.getSlotSize();
+ }
+
+ @Override
public void setPage(ICachedPage page) {
this.page = page;
this.buf = page.getBuffer();
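Note: the new getMaxTupleSize halves the usable page space because an NSM page must be able to hold at least two tuples for a split to make progress. As a worked example with assumed numbers, a 32768-byte page with a 96-byte header admits tuples up to (32768 - 96) / 2 = 16336 bytes; getBytesRequiredToWriteTuple then adds the per-tuple slot overhead before the caller compares against that bound.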
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
index 90b167f..8753d6b 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
@@ -54,6 +54,11 @@
}
@Override
+ public int getBytesRequiredToWriteTuple(ITupleReference tuple) {
+ return tupleWriter.bytesRequired(tuple) + childPtrSize + slotManager.getSlotSize();
+ }
+
+ @Override
public void initBuffer(byte level) {
super.initBuffer(level);
buf.putInt(rightLeafOff, -1);
@@ -185,7 +190,7 @@
ITreeIndexFrame targetFrame = null;
int totalSize = 0;
- int halfPageSize = buf.capacity() / 2 - getPageHeaderSize();
+ int halfPageSize = (buf.capacity() - getPageHeaderSize()) / 2;
int i;
for (i = 0; i < tupleCount; ++i) {
frameTuple.resetByTupleIndex(this, i);
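Note: the halfPageSize change is more than cosmetic. The old expression buf.capacity() / 2 - getPageHeaderSize() subtracts the full header from half the page, while the new (buf.capacity() - getPageHeaderSize()) / 2 halves the space actually available to tuples, matching the getMaxTupleSize formula above. With the same assumed numbers (32768-byte page, 96-byte header) the split threshold moves from 16288 to 16336 bytes, i.e. by header/2, presumably so the split threshold and the new admission bound agree.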
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
index 04b3077..bfb1eca 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
@@ -46,6 +46,11 @@
}
@Override
+ public int getBytesRequiredToWriteTuple(ITupleReference tuple) {
+ return tupleWriter.bytesRequired(tuple) + slotManager.getSlotSize();
+ }
+
+ @Override
public void initBuffer(byte level) {
super.initBuffer(level);
buf.putInt(nextLeafOff, -1);
@@ -146,7 +151,7 @@
int tuplesToLeft;
ITreeIndexFrame targetFrame = null;
int totalSize = 0;
- int halfPageSize = buf.capacity() / 2 - getPageHeaderSize();
+ int halfPageSize = (buf.capacity() - getPageHeaderSize()) / 2;
int i;
for (i = 0; i < tupleCount; ++i) {
frameTuple.resetByTupleIndex(this, i);
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
index 86bc32a..8846873 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
@@ -47,6 +47,7 @@
import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
@@ -74,6 +75,7 @@
private final AtomicInteger smoCounter;
private final ReadWriteLock treeLatch;
+ private final int maxTupleSize;
public BTree(IBufferCache bufferCache, IFileMapProvider fileMapProvider, IFreePageManager freePageManager,
ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
@@ -82,6 +84,10 @@
fieldCount, file);
this.treeLatch = new ReentrantReadWriteLock(true);
this.smoCounter = new AtomicInteger();
+ ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
+ ITreeIndexFrame interiorFrame = interiorFrameFactory.createFrame();
+ maxTupleSize = Math.min(leafFrame.getMaxTupleSize(bufferCache.getPageSize()),
+ interiorFrame.getMaxTupleSize(bufferCache.getPageSize()));
}
private void diskOrderScan(ITreeIndexCursor icursor, BTreeOpContext ctx) throws HyracksDataException {
@@ -304,11 +310,23 @@
}
private void insert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+ int tupleSize = Math.max(ctx.leafFrame.getBytesRequiredToWriteTuple(tuple),
+ ctx.interiorFrame.getBytesRequiredToWriteTuple(tuple));
+ if (tupleSize > maxTupleSize) {
+ throw new TreeIndexException("Space required for record (" + tupleSize
+ + ") larger than maximum acceptable size (" + maxTupleSize + ")");
+ }
ctx.modificationCallback.before(tuple);
insertUpdateOrDelete(tuple, ctx);
}
private void upsert(ITupleReference tuple, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+ int tupleSize = Math.max(ctx.leafFrame.getBytesRequiredToWriteTuple(tuple),
+ ctx.interiorFrame.getBytesRequiredToWriteTuple(tuple));
+ if (tupleSize > maxTupleSize) {
+ throw new TreeIndexException("Space required for record (" + tupleSize
+ + ") larger than maximum acceptable size (" + maxTupleSize + ")");
+ }
ctx.modificationCallback.before(tuple);
insertUpdateOrDelete(tuple, ctx);
}
@@ -320,6 +338,12 @@
if (fieldCount == ctx.cmp.getKeyFieldCount()) {
throw new BTreeNotUpdateableException("Cannot perform updates when the entire tuple forms the key.");
}
+ int tupleSize = Math.max(ctx.leafFrame.getBytesRequiredToWriteTuple(tuple),
+ ctx.interiorFrame.getBytesRequiredToWriteTuple(tuple));
+ if (tupleSize > maxTupleSize) {
+ throw new TreeIndexException("Space required for record (" + tupleSize
+ + ") larger than maximum acceptable size (" + maxTupleSize + ")");
+ }
ctx.modificationCallback.before(tuple);
insertUpdateOrDelete(tuple, ctx);
}
@@ -933,6 +957,13 @@
@Override
public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
try {
+ int tupleSize = Math.max(leafFrame.getBytesRequiredToWriteTuple(tuple),
+ interiorFrame.getBytesRequiredToWriteTuple(tuple));
+ if (tupleSize > maxTupleSize) {
+ throw new TreeIndexException("Space required for record (" + tupleSize
+ + ") larger than maximum acceptable size (" + maxTupleSize + ")");
+ }
+
NodeFrontier leafFrontier = nodeFrontiers.get(0);
int spaceNeeded = tupleWriter.bytesRequired(tuple) + slotSize;
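Note: with these guards, an oversized record is rejected up front with a descriptive TreeIndexException instead of failing deep inside the tree during a page split. A hedged caller-side sketch (accessor wiring elided; exception types as imported in BTree.java above):

    void insertChecked(ITreeIndexAccessor accessor, ITupleReference tuple)
            throws HyracksDataException, IndexException {
        try {
            accessor.insert(tuple);
        } catch (TreeIndexException e) {
            // e.g. "Space required for record (40000) larger than maximum acceptable size (16336)"
            System.err.println("Rejected oversized record: " + e.getMessage());
        }
    }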
diff --git a/hyracks/hyracks-storage-am-common/pom.xml b/hyracks/hyracks-storage-am-common/pom.xml
index 0175467..2a537a1 100644
--- a/hyracks/hyracks-storage-am-common/pom.xml
+++ b/hyracks/hyracks-storage-am-common/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,28 +27,28 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
index 612af25..4a14505 100644
--- a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
@@ -59,6 +59,10 @@
public ByteBuffer getBuffer();
+ public int getMaxTupleSize(int pageSize);
+
+ public int getBytesRequiredToWriteTuple(ITupleReference tuple);
+
// for debugging
public String printHeader();
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
index e46efff..22c6637 100644
--- a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexDataflowHelper.java
@@ -15,6 +15,7 @@
package edu.uci.ics.hyracks.storage.am.common.dataflow;
+import java.io.File;
import java.io.IOException;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
@@ -23,6 +24,7 @@
import edu.uci.ics.hyracks.storage.am.common.api.IIndex;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexDataflowHelper;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
+import edu.uci.ics.hyracks.storage.am.common.util.IndexFileNameUtil;
import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactory;
import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
import edu.uci.ics.hyracks.storage.common.file.LocalResource;
@@ -37,6 +39,7 @@
protected final ResourceIdFactory resourceIdFactory;
protected final FileReference file;
protected final int partition;
+ protected final int ioDeviceId;
protected IIndex index;
@@ -47,7 +50,9 @@
this.localResourceRepository = opDesc.getStorageManager().getLocalResourceRepository(ctx);
this.resourceIdFactory = opDesc.getStorageManager().getResourceIdFactory(ctx);
this.partition = partition;
- this.file = opDesc.getFileSplitProvider().getFileSplits()[partition].getLocalFile();
+ this.ioDeviceId = opDesc.getFileSplitProvider().getFileSplits()[partition].getIODeviceId();
+ this.file = new FileReference(new File(IndexFileNameUtil.prepareFileName(opDesc.getFileSplitProvider()
+ .getFileSplits()[partition].getLocalFile().getFile().getPath(), ioDeviceId)));
}
protected abstract IIndex createIndexInstance() throws HyracksDataException;
@@ -70,7 +75,7 @@
// any physical artifact that the LocalResourceRepository is managing (e.g. a file containing the resource ID).
// Once the index has been created, a new resource ID can be generated.
if (resourceID != -1) {
- localResourceRepository.deleteResourceByName(file.getFile().getPath());
+ localResourceRepository.deleteResourceByName(file.getFile().getPath(), ioDeviceId);
}
index.create();
try {
@@ -78,8 +83,9 @@
resourceID = resourceIdFactory.createId();
ILocalResourceFactory localResourceFactory = opDesc.getLocalResourceFactoryProvider()
.getLocalResourceFactory();
- localResourceRepository.insert(localResourceFactory.createLocalResource(resourceID, file.getFile()
- .getPath(), partition));
+ localResourceRepository.insert(
+ localResourceFactory.createLocalResource(resourceID, file.getFile().getPath(), partition),
+ ioDeviceId);
} catch (IOException e) {
throw new HyracksDataException(e);
}
@@ -121,7 +127,7 @@
}
if (resourceID != -1) {
- localResourceRepository.deleteResourceByName(file.getFile().getPath());
+ localResourceRepository.deleteResourceByName(file.getFile().getPath(), ioDeviceId);
}
index.destroy();
}
@@ -143,4 +149,4 @@
public IHyracksTaskContext getTaskContext() {
return ctx;
}
-}
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
index 98beea2..d867c6c 100644
--- a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
@@ -119,7 +119,7 @@
FrameUtils.flushFrame(writeBuffer, writer);
appender.reset(writeBuffer, true);
if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size (" + appender.getBuffer().capacity() + ")");
}
}
}
@@ -162,4 +162,4 @@
public void fail() throws HyracksDataException {
writer.fail();
}
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
index a861af7..c2b5ca8 100644
--- a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
@@ -80,7 +80,7 @@
FrameUtils.flushFrame(frame, writer);
appender.reset(frame, true);
if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size (" + appender.getBuffer().capacity() + ")");
}
}
}
@@ -104,4 +104,4 @@
public void deinitialize() throws HyracksDataException {
treeIndexHelper.close();
}
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
index 09d357d..7e57240 100644
--- a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
@@ -79,7 +79,7 @@
UTF8StringSerializerDeserializer.INSTANCE.serialize(stats.toString(), dos);
tb.addFieldEndOffset();
if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size (" + appender.getBuffer().capacity() + ")");
}
FrameUtils.flushFrame(frame, writer);
} catch (Exception e) {
@@ -89,4 +89,4 @@
treeIndexHelper.close();
}
}
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
index 31ce573..2e97b21 100644
--- a/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
@@ -61,6 +61,11 @@
}
@Override
+ public int getMaxTupleSize(int pageSize) {
+ return (pageSize - getPageHeaderSize()) / 2;
+ }
+
+ @Override
public boolean isLeaf() {
return buf.get(levelOff) == 0;
}
diff --git a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionReader.java b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/IndexFileNameUtil.java
similarity index 63%
copy from hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionReader.java
copy to hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/IndexFileNameUtil.java
index 8f5ed64..fbab3cf 100644
--- a/hyracks/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataset/IDatasetPartitionReader.java
+++ b/hyracks/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/IndexFileNameUtil.java
@@ -1,21 +1,25 @@
/*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.hyracks.api.dataset;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+package edu.uci.ics.hyracks.storage.am.common.util;
-public interface IDatasetPartitionReader {
- public void writeTo(IFrameWriter writer);
-}
+import java.io.File;
+
+public class IndexFileNameUtil {
+
+ public static String prepareFileName(String path, int ioDeviceId) {
+ return path + File.separator + "device_id_" + ioDeviceId;
+ }
+}
\ No newline at end of file
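Note: a quick usage example of the new utility, exactly as defined above (path value illustrative):

    String indexPath = IndexFileNameUtil.prepareFileName("/storage/myindex", 1);
    // indexPath == "/storage/myindex" + File.separator + "device_id_1"

Each partition's index files now live in a per-device subdirectory, which is what the IndexDataflowHelper change earlier in this patch uses when it resolves its FileReference.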
diff --git a/hyracks/hyracks-storage-am-lsm-btree/pom.xml b/hyracks/hyracks-storage-am-lsm-btree/pom.xml
index 6c4ffd3..29c2ab3 100644
--- a/hyracks/hyracks-storage-am-lsm-btree/pom.xml
+++ b/hyracks/hyracks-storage-am-lsm-btree/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -25,21 +25,21 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-bloomfilter</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelper.java b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelper.java
index 06f06c6..eb2e760 100644
--- a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelper.java
+++ b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelper.java
@@ -40,16 +40,17 @@
public LSMBTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
- this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES, mergePolicy, opTrackerFactory,
- ioScheduler, ioOpCallbackProvider);
+ this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES,
+ DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE, mergePolicy, opTrackerFactory, ioScheduler,
+ ioOpCallbackProvider);
}
public LSMBTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
- int memPageSize, int memNumPages, ILSMMergePolicy mergePolicy,
+ int memPageSize, int memNumPages, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
- super(opDesc, ctx, partition, memPageSize, memNumPages, mergePolicy, opTrackerFactory, ioScheduler,
- ioOpCallbackProvider);
+ super(opDesc, ctx, partition, memPageSize, memNumPages, bloomFilterFalsePositiveRate, mergePolicy,
+ opTrackerFactory, ioScheduler, ioOpCallbackProvider);
}
@Override
@@ -62,7 +63,8 @@
return LSMBTreeUtils.createLSMTree(memBufferCache, memFreePageManager, ctx.getIOManager(), file, opDesc
.getStorageManager().getBufferCache(ctx), opDesc.getStorageManager().getFileMapProvider(ctx),
treeOpDesc.getTreeIndexTypeTraits(), treeOpDesc.getTreeIndexComparatorFactories(), treeOpDesc
- .getTreeIndexBloomFilterKeyFields(), mergePolicy, opTrackerFactory, ioScheduler,
- ioOpCallbackProvider, partition);
+ .getTreeIndexBloomFilterKeyFields(), bloomFilterFalsePositiveRate, mergePolicy,
+ opTrackerFactory, ioScheduler, ioOpCallbackProvider,
+ opDesc.getFileSplitProvider().getFileSplits()[partition].getIODeviceId());
}
}
diff --git a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelperFactory.java b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelperFactory.java
index ebf4bc0..e706786 100644
--- a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelperFactory.java
+++ b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/dataflow/LSMBTreeDataflowHelperFactory.java
@@ -30,16 +30,17 @@
public LSMBTreeDataflowHelperFactory(ILSMMergePolicyProvider mergePolicyProvider,
ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationSchedulerProvider ioSchedulerProvider,
- ILSMIOOperationCallbackProvider ioOpCallbackProvider, int memPageSize, int memNumPages) {
+ ILSMIOOperationCallbackProvider ioOpCallbackProvider, int memPageSize, int memNumPages,
+ double bloomFilterFalsePositiveRate) {
super(mergePolicyProvider, opTrackerFactory, ioSchedulerProvider, ioOpCallbackProvider, memPageSize,
- memNumPages);
+ memNumPages, bloomFilterFalsePositiveRate);
}
@Override
public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
int partition) {
return new LSMBTreeDataflowHelper(opDesc, ctx, partition, memPageSize, memNumPages,
- mergePolicyProvider.getMergePolicy(ctx), opTrackerFactory, ioSchedulerProvider.getIOScheduler(ctx),
- ioOpCallbackProvider);
+ bloomFilterFalsePositiveRate, mergePolicyProvider.getMergePolicy(ctx), opTrackerFactory,
+ ioSchedulerProvider.getIOScheduler(ctx), ioOpCallbackProvider);
}
}
diff --git a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTree.java b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
index 2847004..1652f5a 100644
--- a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
+++ b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
@@ -93,12 +93,12 @@
ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory insertLeafFrameFactory,
ITreeIndexFrameFactory deleteLeafFrameFactory, ILSMIndexFileManager fileManager,
TreeIndexFactory<BTree> diskBTreeFactory, TreeIndexFactory<BTree> bulkLoadBTreeFactory,
- BloomFilterFactory bloomFilterFactory, IFileMapProvider diskFileMapProvider, int fieldCount,
- IBinaryComparatorFactory[] cmpFactories, ILSMMergePolicy mergePolicy,
- ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
- ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
- super(memFreePageManager, diskBTreeFactory.getBufferCache(), fileManager, diskFileMapProvider, mergePolicy,
- opTrackerFactory, ioScheduler, ioOpCallbackProvider);
+ BloomFilterFactory bloomFilterFactory, double bloomFilterFalsePositiveRate,
+ IFileMapProvider diskFileMapProvider, int fieldCount, IBinaryComparatorFactory[] cmpFactories,
+ ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
+ ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
+ super(memFreePageManager, diskBTreeFactory.getBufferCache(), fileManager, diskFileMapProvider,
+ bloomFilterFalsePositiveRate, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
mutableComponent = new LSMBTreeMutableComponent(new BTree(memBufferCache,
((InMemoryBufferCache) memBufferCache).getFileMapProvider(), memFreePageManager, interiorFrameFactory,
insertLeafFrameFactory, cmpFactories, fieldCount, new FileReference(new File("membtree"))),
@@ -367,7 +367,7 @@
int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElements);
BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement,
- MAX_BLOOM_FILTER_ACCEPTABLE_FALSE_POSITIVE_RATE);
+ bloomFilterFalsePositiveRate);
LSMBTreeImmutableComponent component = createDiskComponent(componentFactory, flushOp.getBTreeFlushTarget(),
flushOp.getBloomFilterFlushTarget(), true);
@@ -427,7 +427,7 @@
int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElements);
BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement,
- MAX_BLOOM_FILTER_ACCEPTABLE_FALSE_POSITIVE_RATE);
+ bloomFilterFalsePositiveRate);
LSMBTreeImmutableComponent mergedComponent = createDiskComponent(componentFactory,
mergeOp.getBTreeMergeTarget(), mergeOp.getBloomFilterMergeTarget(), true);
@@ -516,7 +516,7 @@
int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElementsHint);
BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement,
- MAX_BLOOM_FILTER_ACCEPTABLE_FALSE_POSITIVE_RATE);
+ bloomFilterFalsePositiveRate);
builder = ((LSMBTreeImmutableComponent) component).getBloomFilter().createBuilder(numElementsHint,
bloomFilterSpec.getNumHashes(), bloomFilterSpec.getNumBucketsPerElements());
}
@@ -647,4 +647,9 @@
btree.validate();
}
}
+
+ @Override
+ public String toString() {
+ return "LSMBTree [" + fileManager.getBaseDir() + "]";
+ }
}
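Note: the hard-coded 0.01 false-positive rate becomes a constructor parameter threaded down from the dataflow helpers. A sketch of how the rate feeds filter sizing, a fragment using the BloomCalculations and BloomFilterSpecification calls as they appear in this patch (element count illustrative):

    long numElements = 1000000L;      // e.g. tuple count of the component being flushed
    double falsePositiveRate = 0.01;  // the old hard-coded default, now a parameter
    int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numElements);
    BloomFilterSpecification spec = BloomCalculations.computeBloomSpec(maxBucketsPerElement, falsePositiveRate);
    // spec.getNumHashes() and spec.getNumBucketsPerElements() then size the on-disk filter.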
diff --git a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
index bbcee06..0080c42 100644
--- a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
+++ b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTreeFileManager.java
@@ -27,7 +27,6 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
@@ -41,8 +40,8 @@
private final TreeIndexFactory<? extends ITreeIndex> btreeFactory;
public LSMBTreeFileManager(IIOManager ioManager, IFileMapProvider fileMapProvider, FileReference file,
- TreeIndexFactory<? extends ITreeIndex> btreeFactory, int startIODeviceIndex) {
- super(ioManager, fileMapProvider, file, null, startIODeviceIndex);
+ TreeIndexFactory<? extends ITreeIndex> btreeFactory, int ioDeviceId) {
+ super(ioManager, fileMapProvider, file, null, ioDeviceId);
this.btreeFactory = btreeFactory;
}
@@ -80,17 +79,16 @@
ArrayList<ComparableFileName> allBTreeFiles = new ArrayList<ComparableFileName>();
ArrayList<ComparableFileName> allBloomFilterFiles = new ArrayList<ComparableFileName>();
- // Gather files from all IODeviceHandles.
- for (IODeviceHandle dev : ioManager.getIODevices()) {
- // List of valid BTree files.
- cleanupAndGetValidFilesInternal(dev, btreeFilter, btreeFactory, allBTreeFiles);
- HashSet<String> btreeFilesSet = new HashSet<String>();
- for (ComparableFileName cmpFileName : allBTreeFiles) {
- int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
- btreeFilesSet.add(cmpFileName.fileName.substring(0, index));
- }
- validateFiles(dev, btreeFilesSet, allBloomFilterFiles, bloomFilterFilter, null);
+ // Gather files from the IODeviceHandle.
+
+ // List of valid BTree files.
+ cleanupAndGetValidFilesInternal(dev, btreeFilter, btreeFactory, allBTreeFiles);
+ HashSet<String> btreeFilesSet = new HashSet<String>();
+ for (ComparableFileName cmpFileName : allBTreeFiles) {
+ int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
+ btreeFilesSet.add(cmpFileName.fileName.substring(0, index));
}
+ validateFiles(dev, btreeFilesSet, allBloomFilterFiles, bloomFilterFilter, null);
// Sanity check.
if (allBTreeFiles.size() != allBloomFilterFiles.size()) {
diff --git a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeUtils.java b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeUtils.java
index ac20b6d..d92e93d 100644
--- a/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeUtils.java
+++ b/hyracks/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeUtils.java
@@ -45,24 +45,12 @@
import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
public class LSMBTreeUtils {
-
public static LSMBTree createLSMTree(IInMemoryBufferCache memBufferCache,
IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
- IBinaryComparatorFactory[] cmpFactories, int[] bloomFilterKeyFields, ILSMMergePolicy mergePolicy,
- ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
- ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
- return createLSMTree(memBufferCache, memFreePageManager, ioManager, file, diskBufferCache, diskFileMapProvider,
- typeTraits, cmpFactories, bloomFilterKeyFields, mergePolicy, opTrackerFactory, ioScheduler,
- ioOpCallbackProvider, 0);
- }
-
- public static LSMBTree createLSMTree(IInMemoryBufferCache memBufferCache,
- IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
- IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
- IBinaryComparatorFactory[] cmpFactories, int[] bloomFilterKeyFields, ILSMMergePolicy mergePolicy,
- ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
- ILSMIOOperationCallbackProvider ioOpCallbackProvider, int startIODeviceIndex) {
+ IBinaryComparatorFactory[] cmpFactories, int[] bloomFilterKeyFields, double bloomFilterFalsePositiveRate,
+ ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
+ ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider, int ioDeviceId) {
LSMBTreeTupleWriterFactory insertTupleWriterFactory = new LSMBTreeTupleWriterFactory(typeTraits,
cmpFactories.length, false);
LSMBTreeTupleWriterFactory deleteTupleWriterFactory = new LSMBTreeTupleWriterFactory(typeTraits,
@@ -87,12 +75,12 @@
bloomFilterKeyFields);
ILSMIndexFileManager fileNameManager = new LSMBTreeFileManager(ioManager, diskFileMapProvider, file,
- diskBTreeFactory, startIODeviceIndex);
+ diskBTreeFactory, ioDeviceId);
LSMBTree lsmTree = new LSMBTree(memBufferCache, memFreePageManager, interiorFrameFactory,
insertLeafFrameFactory, deleteLeafFrameFactory, fileNameManager, diskBTreeFactory,
- bulkLoadBTreeFactory, bloomFilterFactory, diskFileMapProvider, typeTraits.length, cmpFactories,
- mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
+ bulkLoadBTreeFactory, bloomFilterFactory, bloomFilterFalsePositiveRate, diskFileMapProvider,
+ typeTraits.length, cmpFactories, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
return lsmTree;
}
}
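Note: this is a small API break. The convenience overload that defaulted startIODeviceIndex to 0 is gone, so every createLSMTree caller must now supply both the bloom-filter false-positive rate and an explicit ioDeviceId (taken from the partition's file split, as LSMBTreeDataflowHelper does above).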
diff --git a/hyracks/hyracks-storage-am-lsm-common/pom.xml b/hyracks/hyracks-storage-am-lsm-common/pom.xml
index e4d2040..715ec27 100644
--- a/hyracks/hyracks-storage-am-lsm-common/pom.xml
+++ b/hyracks/hyracks-storage-am-lsm-common/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -25,21 +25,21 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-bloomfilter</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelper.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelper.java
index ea7c3b4..6202518 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelper.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelper.java
@@ -27,9 +27,11 @@
protected static int DEFAULT_MEM_PAGE_SIZE = 32768;
protected static int DEFAULT_MEM_NUM_PAGES = 1000;
+ protected static double DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE = 0.01;
protected final int memPageSize;
protected final int memNumPages;
+ protected final double bloomFilterFalsePositiveRate;
protected final ILSMMergePolicy mergePolicy;
protected final ILSMIOOperationScheduler ioScheduler;
@@ -39,17 +41,19 @@
public AbstractLSMIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
- this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES, mergePolicy, opTrackerFactory,
- ioScheduler, ioOpCallbackProvider);
+ this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES,
+ DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE, mergePolicy, opTrackerFactory, ioScheduler,
+ ioOpCallbackProvider);
}
public AbstractLSMIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
- int memPageSize, int memNumPages, ILSMMergePolicy mergePolicy,
+ int memPageSize, int memNumPages, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
super(opDesc, ctx, partition);
this.memPageSize = memPageSize;
this.memNumPages = memNumPages;
+ this.bloomFilterFalsePositiveRate = bloomFilterFalsePositiveRate;
this.mergePolicy = mergePolicy;
this.opTrackerFactory = opTrackerFactory;
this.ioScheduler = ioScheduler;
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelperFactory.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelperFactory.java
index a2f2a11..bc4271c 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelperFactory.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/AbstractLSMIndexDataflowHelperFactory.java
@@ -30,15 +30,18 @@
protected final ILSMIOOperationCallbackProvider ioOpCallbackProvider;
protected final int memPageSize;
protected final int memNumPages;
+ protected final double bloomFilterFalsePositiveRate;
public AbstractLSMIndexDataflowHelperFactory(ILSMMergePolicyProvider mergePolicyProvider,
ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationSchedulerProvider ioSchedulerProvider,
- ILSMIOOperationCallbackProvider ioOpCallbackProvider, int memPageSize, int memNumPages) {
+ ILSMIOOperationCallbackProvider ioOpCallbackProvider, int memPageSize, int memNumPages,
+ double bloomFilterFalsePositiveRate) {
this.mergePolicyProvider = mergePolicyProvider;
this.opTrackerFactory = opTrackerFactory;
this.ioSchedulerProvider = ioSchedulerProvider;
this.ioOpCallbackProvider = ioOpCallbackProvider;
this.memPageSize = memPageSize;
this.memNumPages = memNumPages;
+ this.bloomFilterFalsePositiveRate = bloomFilterFalsePositiveRate;
}
}
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/LSMIndexInsertUpdateDeleteOperatorNodePushable.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/LSMIndexInsertUpdateDeleteOperatorNodePushable.java
index baa9648..a3e84ed 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/LSMIndexInsertUpdateDeleteOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/dataflow/LSMIndexInsertUpdateDeleteOperatorNodePushable.java
@@ -113,7 +113,9 @@
appender.reset(writeBuffer, true);
for (int i = startTupleIndex; i < endTupleIndex; i++) {
if (!appender.append(accessor, i)) {
- throw new IllegalStateException("Failed to append tuple into frame.");
+ throw new HyracksDataException("Record size ("
+ + (accessor.getTupleEndOffset(i) - accessor.getTupleStartOffset(i))
+ + ") larger than frame size (" + appender.getBuffer().capacity() + ")");
}
}
FrameUtils.flushFrame(writeBuffer, writer);
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
index e4dd369..6782fbe 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndex.java
@@ -39,8 +39,6 @@
import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
public abstract class AbstractLSMIndex implements ILSMIndexInternal {
- protected final static double MAX_BLOOM_FILTER_ACCEPTABLE_FALSE_POSITIVE_RATE = 0.01;
-
protected final ILSMHarness lsmHarness;
protected final ILSMIOOperationScheduler ioScheduler;
@@ -54,19 +52,22 @@
protected final ILSMIndexFileManager fileManager;
protected final IFileMapProvider diskFileMapProvider;
protected final AtomicReference<List<ILSMComponent>> componentsRef;
+ protected final double bloomFilterFalsePositiveRate;
protected boolean isActivated;
private boolean needsFlush = false;
public AbstractLSMIndex(IInMemoryFreePageManager memFreePageManager, IBufferCache diskBufferCache,
- ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider, ILSMMergePolicy mergePolicy,
+ ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider,
+ double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
this.memFreePageManager = memFreePageManager;
this.diskBufferCache = diskBufferCache;
this.diskFileMapProvider = diskFileMapProvider;
this.fileManager = fileManager;
+ this.bloomFilterFalsePositiveRate = bloomFilterFalsePositiveRate;
this.ioScheduler = ioScheduler;
this.ioOpCallbackProvider = ioOpCallbackProvider;
ILSMOperationTracker opTracker = opTrackerFactory.createOperationTracker(this);
@@ -190,4 +191,9 @@
public IBufferCache getBufferCache() {
return diskBufferCache;
}
+
+ @Override
+ public String toString() {
+ return "LSMIndex [" + fileManager.getBaseDir() + "]";
+ }
}
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
index a808143..99b62d8 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/AbstractLSMIndexFileManager.java
@@ -44,10 +44,8 @@
protected static final String SPLIT_STRING = "_";
protected static final String BLOOM_FILTER_STRING = "f";
- // Use all IODevices registered in ioManager in a round-robin fashion to choose
- // where to flush and merge
- protected final IIOManager ioManager;
protected final IFileMapProvider fileMapProvider;
+ protected final IODeviceHandle dev;
// baseDir should reflect dataset name and partition name.
protected String baseDir;
@@ -57,19 +55,15 @@
protected final TreeIndexFactory<? extends ITreeIndex> treeFactory;
- // The current index for the round-robin file assignment
- private int ioDeviceIndex = 0;
-
public AbstractLSMIndexFileManager(IIOManager ioManager, IFileMapProvider fileMapProvider, FileReference file,
- TreeIndexFactory<? extends ITreeIndex> treeFactory, int startIODeviceIndex) {
+ TreeIndexFactory<? extends ITreeIndex> treeFactory, int ioDeviceId) {
this.baseDir = file.getFile().getPath();
if (!baseDir.endsWith(System.getProperty("file.separator"))) {
baseDir += System.getProperty("file.separator");
}
this.fileMapProvider = fileMapProvider;
- this.ioManager = ioManager;
this.treeFactory = treeFactory;
- ioDeviceIndex = startIODeviceIndex % ioManager.getIODevices().size();
+ this.dev = ioManager.getIODevices().get(ioDeviceId);
}
private static FilenameFilter fileNameFilter = new FilenameFilter() {
@@ -135,18 +129,14 @@
@Override
public void createDirs() {
- for (IODeviceHandle dev : ioManager.getIODevices()) {
- File f = new File(dev.getPath(), baseDir);
- f.mkdirs();
- }
+ File f = new File(dev.getPath(), baseDir);
+ f.mkdirs();
}
@Override
public void deleteDirs() {
- for (IODeviceHandle dev : ioManager.getIODevices()) {
- File f = new File(dev.getPath(), baseDir);
- delete(f);
- }
+ File f = new File(dev.getPath(), baseDir);
+ delete(f);
}
private void delete(File f) {
@@ -165,9 +155,6 @@
};
protected FileReference createFlushFile(String relFlushFileName) {
- // Assigns new files to I/O devices in round-robin fashion.
- IODeviceHandle dev = ioManager.getIODevices().get(ioDeviceIndex);
- ioDeviceIndex = (ioDeviceIndex + 1) % ioManager.getIODevices().size();
return dev.createFileReference(relFlushFileName);
}
@@ -198,14 +185,12 @@
List<LSMComponentFileReferences> validFiles = new ArrayList<LSMComponentFileReferences>();
ArrayList<ComparableFileName> allFiles = new ArrayList<ComparableFileName>();
- // Gather files from all IODeviceHandles and delete invalid files
+ // Gather files from the IODeviceHandle and delete invalid files
// There are two types of invalid files:
// (1) The isValid flag is not set
// (2) The file's interval is contained by some other file
// Here, we only filter out (1).
- for (IODeviceHandle dev : ioManager.getIODevices()) {
- cleanupAndGetValidFilesInternal(dev, fileNameFilter, treeFactory, allFiles);
- }
+ cleanupAndGetValidFilesInternal(dev, fileNameFilter, treeFactory, allFiles);
if (allFiles.isEmpty()) {
return validFiles;
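Note: the file manager no longer round-robins flush and merge files across every registered I/O device; each index instance is pinned at construction to the single device named by its file split. Spreading load across devices is thereby pushed up a level, to how partitions are assigned device ids, which also makes directory creation, cleanup, and validation purely local to one device.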
diff --git a/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMHarness.java b/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMHarness.java
index 65e6a3d..7372b86 100644
--- a/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMHarness.java
+++ b/hyracks/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMHarness.java
@@ -16,8 +16,9 @@
package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
import java.util.ArrayList;
-import java.util.Collections;
import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -35,6 +36,8 @@
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMOperationTracker;
public class LSMHarness implements ILSMHarness {
+ private static final Logger LOGGER = Logger.getLogger(LSMHarness.class.getName());
+
private final ILSMIndexInternal lsmIndex;
private final ILSMMergePolicy mergePolicy;
private final ILSMOperationTracker opTracker;
@@ -171,7 +174,7 @@
}
lsmIndex.setFlushStatus(false);
-
+
if (!lsmIndex.scheduleFlush(ctx, callback)) {
callback.beforeOperation();
callback.afterOperation(null, null);
@@ -184,7 +187,11 @@
public void flush(ILSMIndexOperationContext ctx, ILSMIOOperation operation) throws HyracksDataException,
IndexException {
operation.getCallback().beforeOperation();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(lsmIndex + ": flushing");
+ }
ILSMComponent newComponent = lsmIndex.flush(operation);
+
operation.getCallback().afterOperation(null, newComponent);
lsmIndex.markAsValid(newComponent);
operation.getCallback().afterFinalize(newComponent);
@@ -215,6 +222,9 @@
IndexException {
List<ILSMComponent> mergedComponents = new ArrayList<ILSMComponent>();
operation.getCallback().beforeOperation();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(lsmIndex + ": merging");
+ }
ILSMComponent newComponent = lsmIndex.merge(mergedComponents, operation);
ctx.getComponentHolder().addAll(mergedComponents);
operation.getCallback().afterOperation(mergedComponents, newComponent);
diff --git a/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml b/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
index 1aa5302..5a056a2 100644
--- a/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
+++ b/hyracks/hyracks-storage-am-lsm-invertedindex/pom.xml
@@ -5,7 +5,7 @@
<parent>
<artifactId>hyracks</artifactId>
<groupId>edu.uci.ics.hyracks</groupId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
@@ -26,14 +26,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
index 6f28f61..5846f1b 100644
--- a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
@@ -116,7 +116,7 @@
FrameUtils.flushFrame(writeBuffer, writer);
appender.reset(writeBuffer, true);
if (!appender.append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize())) {
- throw new IllegalStateException();
+ throw new HyracksDataException("Record size (" + builder.getSize() + ") larger than frame size (" + appender.getBuffer().capacity() + ")");
}
}
}
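
The one-line change above replaces a bare IllegalStateException with a HyracksDataException that reports both the record size and the frame capacity. The surrounding logic is a flush-then-retry append: a failed append may just mean the current frame is full, but a second failure on a fresh frame means the record can never fit. A self-contained sketch of that pattern with a toy ByteBuffer frame (the real code uses Hyracks' FrameTupleAppender and FrameUtils):

import java.nio.ByteBuffer;

public class FrameAppendSketch {
    static final int FRAME_SIZE = 64;
    static final ByteBuffer frame = ByteBuffer.allocate(FRAME_SIZE);

    static void append(byte[] record) {
        if (frame.remaining() < record.length) {
            flush(); // first failure: the frame may simply be full
            if (frame.remaining() < record.length) {
                // second failure: the record cannot fit even in an empty frame,
                // so report both sizes instead of failing silently
                throw new IllegalStateException("Record size (" + record.length
                        + ") larger than frame size (" + frame.capacity() + ")");
            }
        }
        frame.put(record);
    }

    static void flush() {
        frame.clear(); // stand-in for FrameUtils.flushFrame(writeBuffer, writer)
    }

    public static void main(String[] args) {
        append(new byte[32]);
        append(new byte[16]);  // triggers a flush, then succeeds
        append(new byte[128]); // throws with a descriptive message
    }
}
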
diff --git a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java
index 3d8b391..c97e10d 100644
--- a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java
+++ b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelper.java
@@ -42,16 +42,17 @@
public LSMInvertedIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
- this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES, mergePolicy, opTrackerFactory,
- ioScheduler, ioOpCallbackProvider);
+ this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES,
+ DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE, mergePolicy, opTrackerFactory, ioScheduler,
+ ioOpCallbackProvider);
}
public LSMInvertedIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
- int memPageSize, int memNumPages, ILSMMergePolicy mergePolicy,
+ int memPageSize, int memNumPages, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
- super(opDesc, ctx, partition, memPageSize, memNumPages, mergePolicy, opTrackerFactory, ioScheduler,
- ioOpCallbackProvider);
+ super(opDesc, ctx, partition, memPageSize, memNumPages, bloomFilterFalsePositiveRate, mergePolicy,
+ opTrackerFactory, ioScheduler, ioOpCallbackProvider);
}
@Override
@@ -66,11 +67,12 @@
IBufferCache diskBufferCache = opDesc.getStorageManager().getBufferCache(ctx);
IFileMapProvider diskFileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
LSMInvertedIndex invIndex = InvertedIndexUtils.createLSMInvertedIndex(memBufferCache, memFreePageManager,
- diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(),
- invIndexOpDesc.getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(),
- invIndexOpDesc.getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(),
- diskBufferCache, ctx.getIOManager(), file.getFile().getPath(), mergePolicy, opTrackerFactory,
- ioScheduler, ioOpCallbackProvider, partition);
+ diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(), invIndexOpDesc
+ .getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(), invIndexOpDesc
+ .getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(), diskBufferCache, ctx
+ .getIOManager(), file.getFile().getPath(), bloomFilterFalsePositiveRate, mergePolicy,
+ opTrackerFactory, ioScheduler, ioOpCallbackProvider,
+ opDesc.getFileSplitProvider().getFileSplits()[partition].getIODeviceId());
return invIndex;
} catch (IndexException e) {
throw new HyracksDataException(e);
diff --git a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelperFactory.java b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelperFactory.java
index 9796ebc..60a0b47 100644
--- a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelperFactory.java
+++ b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/LSMInvertedIndexDataflowHelperFactory.java
@@ -30,16 +30,18 @@
public LSMInvertedIndexDataflowHelperFactory(ILSMMergePolicyProvider mergePolicyProvider,
ILSMOperationTrackerFactory opTrackerProvider, ILSMIOOperationSchedulerProvider ioSchedulerProvider,
- ILSMIOOperationCallbackProvider ioOpCallbackProvider, int memPageSize, int memNumPages) {
+ ILSMIOOperationCallbackProvider ioOpCallbackProvider, int memPageSize, int memNumPages,
+ double bloomFilterFalsePositiveRate) {
super(mergePolicyProvider, opTrackerProvider, ioSchedulerProvider, ioOpCallbackProvider, memPageSize,
- memNumPages);
+ memNumPages, bloomFilterFalsePositiveRate);
}
@Override
public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
int partition) {
- return new LSMInvertedIndexDataflowHelper(opDesc, ctx, partition, memPageSize, memNumPages, mergePolicyProvider.getMergePolicy(ctx),
- opTrackerFactory, ioSchedulerProvider.getIOScheduler(ctx), ioOpCallbackProvider);
+ return new LSMInvertedIndexDataflowHelper(opDesc, ctx, partition, memPageSize, memNumPages,
+ bloomFilterFalsePositiveRate, mergePolicyProvider.getMergePolicy(ctx), opTrackerFactory,
+ ioSchedulerProvider.getIOScheduler(ctx), ioOpCallbackProvider);
}
}
diff --git a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java
index c5b4f07..6c5b25f 100644
--- a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java
+++ b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelper.java
@@ -42,16 +42,17 @@
public PartitionedLSMInvertedIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
int partition, ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
- this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES, mergePolicy, opTrackerFactory,
- ioScheduler, ioOpCallbackProvider);
+ this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES,
+ DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE, mergePolicy, opTrackerFactory, ioScheduler,
+ ioOpCallbackProvider);
}
public PartitionedLSMInvertedIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
- int partition, int memPageSize, int memNumPages, ILSMMergePolicy mergePolicy,
- ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
- ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
- super(opDesc, ctx, partition, memPageSize, memNumPages, mergePolicy, opTrackerFactory, ioScheduler,
- ioOpCallbackProvider);
+ int partition, int memPageSize, int memNumPages, double bloomFilterFalsePositiveRate,
+ ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
+ ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
+ super(opDesc, ctx, partition, memPageSize, memNumPages, bloomFilterFalsePositiveRate, mergePolicy,
+ opTrackerFactory, ioScheduler, ioOpCallbackProvider);
}
@Override
@@ -66,11 +67,12 @@
IBufferCache diskBufferCache = opDesc.getStorageManager().getBufferCache(ctx);
IFileMapProvider diskFileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
PartitionedLSMInvertedIndex invIndex = InvertedIndexUtils.createPartitionedLSMInvertedIndex(memBufferCache,
- memFreePageManager, diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(),
- invIndexOpDesc.getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(),
- invIndexOpDesc.getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(),
- diskBufferCache, ctx.getIOManager(), file.getFile().getPath(), mergePolicy, opTrackerFactory,
- ioScheduler, ioOpCallbackProvider, partition);
+ memFreePageManager, diskFileMapProvider, invIndexOpDesc.getInvListsTypeTraits(), invIndexOpDesc
+ .getInvListsComparatorFactories(), invIndexOpDesc.getTokenTypeTraits(), invIndexOpDesc
+ .getTokenComparatorFactories(), invIndexOpDesc.getTokenizerFactory(), diskBufferCache, ctx
+ .getIOManager(), file.getFile().getPath(), bloomFilterFalsePositiveRate, mergePolicy,
+ opTrackerFactory, ioScheduler, ioOpCallbackProvider,
+ opDesc.getFileSplitProvider().getFileSplits()[partition].getIODeviceId());
return invIndex;
} catch (IndexException e) {
throw new HyracksDataException(e);
diff --git a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelperFactory.java b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelperFactory.java
index 8a8aad2..801462b 100644
--- a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelperFactory.java
+++ b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/dataflow/PartitionedLSMInvertedIndexDataflowHelperFactory.java
@@ -30,17 +30,17 @@
public PartitionedLSMInvertedIndexDataflowHelperFactory(ILSMMergePolicyProvider mergePolicyProvider,
ILSMOperationTrackerFactory opTrackerProvider, ILSMIOOperationSchedulerProvider ioSchedulerProvider,
- ILSMIOOperationCallbackProvider ioOpCallbackProvider, int memPageSize, int memNumPages) {
+ ILSMIOOperationCallbackProvider ioOpCallbackProvider, int memPageSize, int memNumPages,
+ double bloomFilterFalsePositiveRate) {
super(mergePolicyProvider, opTrackerProvider, ioSchedulerProvider, ioOpCallbackProvider, memPageSize,
- memNumPages);
+ memNumPages, bloomFilterFalsePositiveRate);
}
@Override
public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
int partition) {
return new PartitionedLSMInvertedIndexDataflowHelper(opDesc, ctx, partition, memPageSize, memNumPages,
- mergePolicyProvider.getMergePolicy(ctx), opTrackerFactory, ioSchedulerProvider.getIOScheduler(ctx),
- ioOpCallbackProvider);
+ bloomFilterFalsePositiveRate, mergePolicyProvider.getMergePolicy(ctx), opTrackerFactory,
+ ioSchedulerProvider.getIOScheduler(ctx), ioOpCallbackProvider);
}
-
}
diff --git a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
index 33052f3..beb4dc9 100644
--- a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
+++ b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndex.java
@@ -97,15 +97,15 @@
public LSMInvertedIndex(IInMemoryBufferCache memBufferCache, IInMemoryFreePageManager memFreePageManager,
OnDiskInvertedIndexFactory diskInvIndexFactory, BTreeFactory deletedKeysBTreeFactory,
- BloomFilterFactory bloomFilterFactory, ILSMIndexFileManager fileManager,
- IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
+ BloomFilterFactory bloomFilterFactory, double bloomFilterFalsePositiveRate,
+ ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
throws IndexException {
- super(memFreePageManager, diskInvIndexFactory.getBufferCache(), fileManager, diskFileMapProvider, mergePolicy,
- opTrackerFactory, ioScheduler, ioOpCallbackProvider);
+ super(memFreePageManager, diskInvIndexFactory.getBufferCache(), fileManager, diskFileMapProvider,
+ bloomFilterFalsePositiveRate, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
this.memFreePageManager = memFreePageManager;
this.tokenizerFactory = tokenizerFactory;
this.invListTypeTraits = invListTypeTraits;
@@ -314,6 +314,7 @@
throw new UnsupportedOperationException("Operation " + ctx.getOperation() + " not supported.");
}
}
+ mutableComponent.setIsModified();
}
@Override
@@ -461,7 +462,7 @@
if (numBTreeTuples > 0) {
int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numBTreeTuples);
BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement,
- MAX_BLOOM_FILTER_ACCEPTABLE_FALSE_POSITIVE_RATE);
+ bloomFilterFalsePositiveRate);
// Create an BTree instance for the deleted keys.
BTree diskDeletedKeysBTree = component.getDeletedKeysBTree();
@@ -566,6 +567,7 @@
public class LSMInvertedIndexBulkLoader implements IIndexBulkLoader {
private final ILSMComponent component;
private final IIndexBulkLoader invIndexBulkLoader;
+ private boolean exceptionCaught = false;
public LSMInvertedIndexBulkLoader(float fillFactor, boolean verifyInput, long numElementsHint)
throws IndexException {
@@ -599,6 +601,7 @@
}
protected void handleException() throws HyracksDataException {
+ exceptionCaught = true;
((LSMInvertedIndexImmutableComponent) component).getInvIndex().deactivate();
((LSMInvertedIndexImmutableComponent) component).getInvIndex().destroy();
((LSMInvertedIndexImmutableComponent) component).getDeletedKeysBTree().deactivate();
@@ -609,7 +612,9 @@
@Override
public void end() throws IndexException, HyracksDataException {
- invIndexBulkLoader.end();
+ if (!exceptionCaught) {
+ invIndexBulkLoader.end();
+ }
lsmHarness.addBulkLoadedComponent(component);
}
}
@@ -732,4 +737,9 @@
component.getDeletedKeysBTree().validate();
}
}
+
+ @Override
+ public String toString() {
+ return "LSMInvertedIndex [" + fileManager.getBaseDir() + "]";
+ }
}
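
Two behavioral changes above deserve a note: the mutable component is now marked modified after every mutation, and the bulk loader records whether handleException() ran so that end() does not try to finalize a component that was already torn down. A simplified sketch of the latter guard (names here are illustrative, not the Hyracks interfaces):

public class BulkLoaderSketch {
    private boolean exceptionCaught = false;

    void add(Object tuple) {
        try {
            // ... write the tuple into the new on-disk component ...
        } catch (RuntimeException e) {
            handleException();
            throw e;
        }
    }

    void handleException() {
        exceptionCaught = true;
        // deactivate and destroy the partially built component
    }

    void end() {
        if (!exceptionCaught) {
            // finalize the component only if loading completed cleanly
        }
        // hand whatever state remains back to the harness
    }

    public static void main(String[] args) {
        BulkLoaderSketch loader = new BulkLoaderSketch();
        loader.add("tuple");
        loader.end();
    }
}
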
diff --git a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
index ccba624..aa1dbeb 100644
--- a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
+++ b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/LSMInvertedIndexFileManager.java
@@ -27,7 +27,6 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BTreeFactory;
@@ -63,8 +62,8 @@
};
public LSMInvertedIndexFileManager(IIOManager ioManager, IFileMapProvider fileMapProvider, FileReference file,
- BTreeFactory btreeFactory, int startIODeviceIndex) {
- super(ioManager, fileMapProvider, file, null, startIODeviceIndex);
+ BTreeFactory btreeFactory, int ioDeviceId) {
+ super(ioManager, fileMapProvider, file, null, ioDeviceId);
this.btreeFactory = btreeFactory;
}
@@ -100,20 +99,19 @@
ArrayList<ComparableFileName> allDeletedKeysBTreeFiles = new ArrayList<ComparableFileName>();
ArrayList<ComparableFileName> allBloomFilterFiles = new ArrayList<ComparableFileName>();
- // Gather files from all IODeviceHandles.
- for (IODeviceHandle dev : ioManager.getIODevices()) {
- cleanupAndGetValidFilesInternal(dev, deletedKeysBTreeFilter, btreeFactory, allDeletedKeysBTreeFiles);
- HashSet<String> deletedKeysBTreeFilesSet = new HashSet<String>();
- for (ComparableFileName cmpFileName : allDeletedKeysBTreeFiles) {
- int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
- deletedKeysBTreeFilesSet.add(cmpFileName.fileName.substring(0, index));
- }
-
- // TODO: do we really need to validate the inverted lists files or is validating the dict. BTrees is enough?
- validateFiles(dev, deletedKeysBTreeFilesSet, allInvListsFiles, invListFilter, null);
- validateFiles(dev, deletedKeysBTreeFilesSet, allDictBTreeFiles, dictBTreeFilter, btreeFactory);
- validateFiles(dev, deletedKeysBTreeFilesSet, allBloomFilterFiles, bloomFilterFilter, null);
+ // Gather files from the IODeviceHandle.
+ cleanupAndGetValidFilesInternal(dev, deletedKeysBTreeFilter, btreeFactory, allDeletedKeysBTreeFiles);
+ HashSet<String> deletedKeysBTreeFilesSet = new HashSet<String>();
+ for (ComparableFileName cmpFileName : allDeletedKeysBTreeFiles) {
+ int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
+ deletedKeysBTreeFilesSet.add(cmpFileName.fileName.substring(0, index));
}
+
+ // TODO: do we really need to validate the inverted lists files or is validating the dict. BTrees enough?
+ validateFiles(dev, deletedKeysBTreeFilesSet, allInvListsFiles, invListFilter, null);
+ validateFiles(dev, deletedKeysBTreeFilesSet, allDictBTreeFiles, dictBTreeFilter, btreeFactory);
+ validateFiles(dev, deletedKeysBTreeFilesSet, allBloomFilterFiles, bloomFilterFilter, null);
+
// Sanity check.
if (allDictBTreeFiles.size() != allInvListsFiles.size()
|| allDictBTreeFiles.size() != allDeletedKeysBTreeFiles.size()
diff --git a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
index 1b293eb..a45f729 100644
--- a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
+++ b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/impls/PartitionedLSMInvertedIndex.java
@@ -38,15 +38,17 @@
public PartitionedLSMInvertedIndex(IInMemoryBufferCache memBufferCache,
IInMemoryFreePageManager memFreePageManager, OnDiskInvertedIndexFactory diskInvIndexFactory,
BTreeFactory deletedKeysBTreeFactory, BloomFilterFactory bloomFilterFactory,
- ILSMIndexFileManager fileManager, IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
+ double bloomFilterFalsePositiveRate, ILSMIndexFileManager fileManager,
+ IFileMapProvider diskFileMapProvider, ITypeTraits[] invListTypeTraits,
IBinaryComparatorFactory[] invListCmpFactories, ITypeTraits[] tokenTypeTraits,
IBinaryComparatorFactory[] tokenCmpFactories, IBinaryTokenizerFactory tokenizerFactory,
ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
throws IndexException {
super(memBufferCache, memFreePageManager, diskInvIndexFactory, deletedKeysBTreeFactory, bloomFilterFactory,
- fileManager, diskFileMapProvider, invListTypeTraits, invListCmpFactories, tokenTypeTraits,
- tokenCmpFactories, tokenizerFactory, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
+ bloomFilterFalsePositiveRate, fileManager, diskFileMapProvider, invListTypeTraits, invListCmpFactories,
+ tokenTypeTraits, tokenCmpFactories, tokenizerFactory, mergePolicy, opTrackerFactory, ioScheduler,
+ ioOpCallbackProvider);
}
protected InMemoryInvertedIndex createInMemoryInvertedIndex(IInMemoryBufferCache memBufferCache)
diff --git a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
index 79c8ccf..06be7aa 100644
--- a/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
+++ b/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexUtils.java
@@ -122,22 +122,9 @@
ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
IBinaryTokenizerFactory tokenizerFactory, IBufferCache diskBufferCache, IIOManager ioManager,
- String onDiskDir, ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
- ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
- throws IndexException {
- return createLSMInvertedIndex(memBufferCache, memFreePageManager, diskFileMapProvider, invListTypeTraits,
- invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory, diskBufferCache, ioManager,
- onDiskDir, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider, 0);
- }
-
- public static LSMInvertedIndex createLSMInvertedIndex(IInMemoryBufferCache memBufferCache,
- IInMemoryFreePageManager memFreePageManager, IFileMapProvider diskFileMapProvider,
- ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
- ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
- IBinaryTokenizerFactory tokenizerFactory, IBufferCache diskBufferCache, IIOManager ioManager,
- String onDiskDir, ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
- ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
- int startIODeviceIndex) throws IndexException {
+ String onDiskDir, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
+ ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
+ ILSMIOOperationCallbackProvider ioOpCallbackProvider, int ioDeviceId) throws IndexException {
BTreeFactory deletedKeysBTreeFactory = createDeletedKeysBTreeFactory(diskFileMapProvider, invListTypeTraits,
invListCmpFactories, diskBufferCache);
@@ -151,7 +138,7 @@
FileReference onDiskDirFileRef = new FileReference(new File(onDiskDir));
LSMInvertedIndexFileManager fileManager = new LSMInvertedIndexFileManager(ioManager, diskFileMapProvider,
- onDiskDirFileRef, deletedKeysBTreeFactory, startIODeviceIndex);
+ onDiskDirFileRef, deletedKeysBTreeFactory, ioDeviceId);
IInvertedListBuilderFactory invListBuilderFactory = new FixedSizeElementInvertedListBuilderFactory(
invListTypeTraits);
@@ -160,9 +147,9 @@
tokenCmpFactories, fileManager);
LSMInvertedIndex invIndex = new LSMInvertedIndex(memBufferCache, memFreePageManager, invIndexFactory,
- deletedKeysBTreeFactory, bloomFilterFactory, fileManager, diskFileMapProvider, invListTypeTraits,
- invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory, mergePolicy,
- opTrackerFactory, ioScheduler, ioOpCallbackProvider);
+ deletedKeysBTreeFactory, bloomFilterFactory, bloomFilterFalsePositiveRate, fileManager,
+ diskFileMapProvider, invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories,
+ tokenizerFactory, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
return invIndex;
}
@@ -171,23 +158,9 @@
ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
IBinaryTokenizerFactory tokenizerFactory, IBufferCache diskBufferCache, IIOManager ioManager,
- String onDiskDir, ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
- ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
- throws IndexException {
- return createPartitionedLSMInvertedIndex(memBufferCache, memFreePageManager, diskFileMapProvider,
- invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
- diskBufferCache, ioManager, onDiskDir, mergePolicy, opTrackerFactory, ioScheduler,
- ioOpCallbackProvider, 0);
- }
-
- public static PartitionedLSMInvertedIndex createPartitionedLSMInvertedIndex(IInMemoryBufferCache memBufferCache,
- IInMemoryFreePageManager memFreePageManager, IFileMapProvider diskFileMapProvider,
- ITypeTraits[] invListTypeTraits, IBinaryComparatorFactory[] invListCmpFactories,
- ITypeTraits[] tokenTypeTraits, IBinaryComparatorFactory[] tokenCmpFactories,
- IBinaryTokenizerFactory tokenizerFactory, IBufferCache diskBufferCache, IIOManager ioManager,
- String onDiskDir, ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
- ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
- int startIODeviceIndex) throws IndexException {
+ String onDiskDir, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
+ ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
+ ILSMIOOperationCallbackProvider ioOpCallbackProvider, int ioDeviceId) throws IndexException {
BTreeFactory deletedKeysBTreeFactory = createDeletedKeysBTreeFactory(diskFileMapProvider, invListTypeTraits,
invListCmpFactories, diskBufferCache);
@@ -201,7 +174,7 @@
FileReference onDiskDirFileRef = new FileReference(new File(onDiskDir));
LSMInvertedIndexFileManager fileManager = new LSMInvertedIndexFileManager(ioManager, diskFileMapProvider,
- onDiskDirFileRef, deletedKeysBTreeFactory, startIODeviceIndex);
+ onDiskDirFileRef, deletedKeysBTreeFactory, ioDeviceId);
IInvertedListBuilderFactory invListBuilderFactory = new FixedSizeElementInvertedListBuilderFactory(
invListTypeTraits);
@@ -210,9 +183,9 @@
tokenTypeTraits, tokenCmpFactories, fileManager);
PartitionedLSMInvertedIndex invIndex = new PartitionedLSMInvertedIndex(memBufferCache, memFreePageManager,
- invIndexFactory, deletedKeysBTreeFactory, bloomFilterFactory, fileManager, diskFileMapProvider,
- invListTypeTraits, invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
- mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
+ invIndexFactory, deletedKeysBTreeFactory, bloomFilterFactory, bloomFilterFalsePositiveRate,
+ fileManager, diskFileMapProvider, invListTypeTraits, invListCmpFactories, tokenTypeTraits,
+ tokenCmpFactories, tokenizerFactory, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
return invIndex;
}
}
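
Both utility methods above lose their zero-device convenience overloads: callers must now supply the bloom filter false positive rate and an explicit ioDeviceId, which the dataflow helpers take from the partition's file split. A small sketch of that per-partition device selection, with a stand-in FileSplit in place of the Hyracks class:

public class DeviceSelectionSketch {
    static class FileSplit {
        private final int ioDeviceId;
        FileSplit(int ioDeviceId) { this.ioDeviceId = ioDeviceId; }
        int getIODeviceId() { return ioDeviceId; }
    }

    public static void main(String[] args) {
        FileSplit[] splits = { new FileSplit(0), new FileSplit(1), new FileSplit(0) };
        int partition = 1;
        // Each partition's component files land on the single device its split
        // names, replacing the old behavior of iterating every IODeviceHandle.
        int ioDeviceId = splits[partition].getIODeviceId();
        System.out.println("partition " + partition + " -> device " + ioDeviceId);
    }
}
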
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/pom.xml b/hyracks/hyracks-storage-am-lsm-rtree/pom.xml
index a345ee5..f842b44 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/pom.xml
+++ b/hyracks/hyracks-storage-am-lsm-rtree/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -25,21 +25,21 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-rtree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/AbstractLSMRTreeDataflowHelper.java b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/AbstractLSMRTreeDataflowHelper.java
index c363c99..5fcdcea 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/AbstractLSMRTreeDataflowHelper.java
+++ b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/AbstractLSMRTreeDataflowHelper.java
@@ -55,19 +55,20 @@
ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
ILinearizeComparatorFactory linearizeCmpFactory) {
- this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES, btreeComparatorFactories,
- valueProviderFactories, rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler,
- ioOpCallbackProvider, linearizeCmpFactory);
+ this(opDesc, ctx, partition, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES,
+ DEFAULT_BLOOM_FILTER_FALSE_POSITIVE_RATE, btreeComparatorFactories, valueProviderFactories,
+ rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider, linearizeCmpFactory);
}
public AbstractLSMRTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
- int memPageSize, int memNumPages, IBinaryComparatorFactory[] btreeComparatorFactories,
+ int memPageSize, int memNumPages, double bloomFilterFalsePositiveRate,
+ IBinaryComparatorFactory[] btreeComparatorFactories,
IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
ILinearizeComparatorFactory linearizeCmpFactory) {
- super(opDesc, ctx, partition, memPageSize, memNumPages, mergePolicy, opTrackerFactory, ioScheduler,
- ioOpCallbackProvider);
+ super(opDesc, ctx, partition, memPageSize, memNumPages, bloomFilterFalsePositiveRate, mergePolicy,
+ opTrackerFactory, ioScheduler, ioOpCallbackProvider);
this.btreeComparatorFactories = btreeComparatorFactories;
this.valueProviderFactories = valueProviderFactories;
this.rtreePolicyType = rtreePolicyType;
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java
index 1df914e..5ab5513 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java
+++ b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelper.java
@@ -50,13 +50,15 @@
}
public LSMRTreeDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx, int partition,
- int memPageSize, int memNumPages, IBinaryComparatorFactory[] btreeComparatorFactories,
+ int memPageSize, int memNumPages, double bloomFilterFalsePositiveRate,
+ IBinaryComparatorFactory[] btreeComparatorFactories,
IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
ILinearizeComparatorFactory linearizeCmpFactory) {
- super(opDesc, ctx, partition, memPageSize, memNumPages, btreeComparatorFactories, valueProviderFactories,
- rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider, linearizeCmpFactory);
+ super(opDesc, ctx, partition, memPageSize, memNumPages, bloomFilterFalsePositiveRate, btreeComparatorFactories,
+ valueProviderFactories, rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler,
+ ioOpCallbackProvider, linearizeCmpFactory);
}
@Override
@@ -69,8 +71,9 @@
try {
return LSMRTreeUtils.createLSMTree(memBufferCache, memFreePageManager, ioManager, file, diskBufferCache,
diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
- rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider,
- linearizeCmpFactory, startIODeviceIndex);
+ rtreePolicyType, bloomFilterFalsePositiveRate, mergePolicy, opTrackerFactory, ioScheduler,
+ ioOpCallbackProvider, linearizeCmpFactory,
+ opDesc.getFileSplitProvider().getFileSplits()[partition].getIODeviceId());
} catch (TreeIndexException e) {
throw new HyracksDataException(e);
}
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelperFactory.java b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelperFactory.java
index a730895..0778e15 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelperFactory.java
+++ b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeDataflowHelperFactory.java
@@ -41,9 +41,10 @@
RTreePolicyType rtreePolicyType, IBinaryComparatorFactory[] btreeComparatorFactories,
ILSMMergePolicyProvider mergePolicyProvider, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationSchedulerProvider ioSchedulerProvider, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
- ILinearizeComparatorFactory linearizeCmpFactory, int memPageSize, int memNumPages) {
+ ILinearizeComparatorFactory linearizeCmpFactory, int memPageSize, int memNumPages,
+ double bloomFilterFalsePositiveRate) {
super(mergePolicyProvider, opTrackerFactory, ioSchedulerProvider, ioOpCallbackProvider, memPageSize,
- memNumPages);
+ memNumPages, bloomFilterFalsePositiveRate);
this.btreeComparatorFactories = btreeComparatorFactories;
this.valueProviderFactories = valueProviderFactories;
this.rtreePolicyType = rtreePolicyType;
@@ -53,8 +54,9 @@
@Override
public IndexDataflowHelper createIndexDataflowHelper(IIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
int partition) {
- return new LSMRTreeDataflowHelper(opDesc, ctx, partition, memPageSize, memNumPages, btreeComparatorFactories, valueProviderFactories,
- rtreePolicyType, mergePolicyProvider.getMergePolicy(ctx), opTrackerFactory,
- ioSchedulerProvider.getIOScheduler(ctx), ioOpCallbackProvider, linearizeCmpFactory);
+ return new LSMRTreeDataflowHelper(opDesc, ctx, partition, memPageSize, memNumPages,
+ bloomFilterFalsePositiveRate, btreeComparatorFactories, valueProviderFactories, rtreePolicyType,
+ mergePolicyProvider.getMergePolicy(ctx), opTrackerFactory, ioSchedulerProvider.getIOScheduler(ctx),
+ ioOpCallbackProvider, linearizeCmpFactory);
}
}
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java
index 6f5ecb1..d4fac8f 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java
+++ b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/dataflow/LSMRTreeWithAntiMatterTuplesDataflowHelper.java
@@ -54,7 +54,7 @@
ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
ILinearizeComparatorFactory linearizeCmpFactory) {
- super(opDesc, ctx, partition, memPageSize, memNumPages, btreeComparatorFactories, valueProviderFactories,
+ super(opDesc, ctx, partition, memPageSize, memNumPages, 0, btreeComparatorFactories, valueProviderFactories,
rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider, linearizeCmpFactory);
}
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
index 9b377a9..f62fab1 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
+++ b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/AbstractLSMRTree.java
@@ -26,6 +26,7 @@
import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree.BTreeAccessor;
import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
import edu.uci.ics.hyracks.storage.am.common.api.IInMemoryFreePageManager;
@@ -55,6 +56,7 @@
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree.RTreeAccessor;
import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
public abstract class AbstractLSMRTree extends AbstractLSMIndex implements ITreeIndex {
@@ -89,10 +91,11 @@
ILSMComponentFactory componentFactory, IFileMapProvider diskFileMapProvider, int fieldCount,
IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
ILinearizeComparatorFactory linearizer, int[] comparatorFields, IBinaryComparatorFactory[] linearizerArray,
- ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
- ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
- super(memFreePageManager, diskRTreeFactory.getBufferCache(), fileManager, diskFileMapProvider, mergePolicy,
- opTrackerFactory, ioScheduler, ioOpCallbackProvider);
+ double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
+ ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
+ ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
+ super(memFreePageManager, diskRTreeFactory.getBufferCache(), fileManager, diskFileMapProvider,
+ bloomFilterFalsePositiveRate, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
RTree memRTree = new RTree(memBufferCache, ((InMemoryBufferCache) memBufferCache).getFileMapProvider(),
memFreePageManager, rtreeInteriorFrameFactory, rtreeLeafFrameFactory, rtreeCmpFactories, fieldCount,
new FileReference(new File("memrtree")));
@@ -279,6 +282,8 @@
throw new UnsupportedOperationException("Physical delete not supported in the LSM-RTree");
}
+ ctx.modificationCallback.before(tuple);
+ ctx.modificationCallback.found(null, tuple);
if (ctx.getOperation() == IndexOperation.INSERT) {
// Before each insert, we must check whether there exist a killer
// tuple in the memBTree. If we find a killer tuple, we must truly
@@ -323,13 +328,16 @@
}
protected LSMRTreeOpContext createOpContext(IModificationOperationCallback modCallback) {
- return new LSMRTreeOpContext((RTree.RTreeAccessor) mutableComponent.getRTree().createAccessor(modCallback,
- NoOpOperationCallback.INSTANCE), (IRTreeLeafFrame) rtreeLeafFrameFactory.createFrame(),
+ RTreeAccessor rtreeAccessor = (RTree.RTreeAccessor) mutableComponent.getRTree().createAccessor(
+ NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+ BTreeAccessor btreeAccessor = (BTree.BTreeAccessor) mutableComponent.getBTree().createAccessor(
+ NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
+
+ return new LSMRTreeOpContext(rtreeAccessor, (IRTreeLeafFrame) rtreeLeafFrameFactory.createFrame(),
(IRTreeInteriorFrame) rtreeInteriorFrameFactory.createFrame(), memFreePageManager
- .getMetaDataFrameFactory().createFrame(), 4, (BTree.BTreeAccessor) mutableComponent.getBTree()
- .createAccessor(modCallback, NoOpOperationCallback.INSTANCE), btreeLeafFrameFactory,
+ .getMetaDataFrameFactory().createFrame(), 4, btreeAccessor, btreeLeafFrameFactory,
btreeInteriorFrameFactory, memFreePageManager.getMetaDataFrameFactory().createFrame(),
- rtreeCmpFactories, btreeCmpFactories, null, null);
+ rtreeCmpFactories, btreeCmpFactories, modCallback, NoOpOperationCallback.INSTANCE);
}
@Override
@@ -355,4 +363,9 @@
InMemoryBufferCache memBufferCache = (InMemoryBufferCache) mutableComponent.getRTree().getBufferCache();
return memBufferCache.getNumPages() * memBufferCache.getPageSize();
}
+
+ @Override
+ public String toString() {
+ return "LSMRTree [" + fileManager.getBaseDir() + "]";
+ }
}
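
Besides threading the bloom filter rate through, the AbstractLSMRTree changes rework callback handling: before()/found() now fire on the LSM operation context, while the in-memory R-tree and B-tree accessors are created with NoOpOperationCallback so the hooks cannot fire a second time inside the component. A simplified sketch of that ordering (the interface below is a stand-in for IModificationOperationCallback):

public class CallbackOrderingSketch {
    interface ModificationCallback {
        void before(Object tuple);
        void found(Object beforeTuple, Object tuple);
    }

    static final ModificationCallback NOOP = new ModificationCallback() {
        public void before(Object tuple) { }
        public void found(Object beforeTuple, Object tuple) { }
    };

    static void modify(ModificationCallback cb, Object tuple) {
        cb.before(tuple);      // e.g., latch or lock on the key
        cb.found(null, tuple); // report the (absent) prior image
        // ... dispatch to insert/delete on the memory component, whose
        //     accessors were created with NOOP so nothing fires twice ...
    }

    public static void main(String[] args) {
        modify(NOOP, "tuple");
    }
}
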
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
index fc5b06d..367b387 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
+++ b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
@@ -72,16 +72,17 @@
ITreeIndexFrameFactory btreeInteriorFrameFactory, ITreeIndexFrameFactory btreeLeafFrameFactory,
ILSMIndexFileManager fileNameManager, TreeIndexFactory<RTree> diskRTreeFactory,
TreeIndexFactory<BTree> diskBTreeFactory, BloomFilterFactory bloomFilterFactory,
- IFileMapProvider diskFileMapProvider, int fieldCount, IBinaryComparatorFactory[] rtreeCmpFactories,
- IBinaryComparatorFactory[] btreeCmpFactories, ILinearizeComparatorFactory linearizer,
- int[] comparatorFields, IBinaryComparatorFactory[] linearizerArray, ILSMMergePolicy mergePolicy,
- ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
- ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
+ double bloomFilterFalsePositiveRate, IFileMapProvider diskFileMapProvider, int fieldCount,
+ IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
+ ILinearizeComparatorFactory linearizer, int[] comparatorFields, IBinaryComparatorFactory[] linearizerArray,
+ ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
+ ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider) {
super(memBufferCache, memFreePageManager, rtreeInteriorFrameFactory, rtreeLeafFrameFactory,
btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager, diskRTreeFactory,
new LSMRTreeComponentFactory(diskRTreeFactory, diskBTreeFactory, bloomFilterFactory),
diskFileMapProvider, fieldCount, rtreeCmpFactories, btreeCmpFactories, linearizer, comparatorFields,
- linearizerArray, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
+ linearizerArray, bloomFilterFalsePositiveRate, mergePolicy, opTrackerFactory, ioScheduler,
+ ioOpCallbackProvider);
}
/**
@@ -303,7 +304,7 @@
if (numBTreeTuples > 0) {
int maxBucketsPerElement = BloomCalculations.maxBucketsPerElement(numBTreeTuples);
BloomFilterSpecification bloomFilterSpec = BloomCalculations.computeBloomSpec(maxBucketsPerElement,
- MAX_BLOOM_FILTER_ACCEPTABLE_FALSE_POSITIVE_RATE);
+ bloomFilterFalsePositiveRate);
IIndexCursor btreeScanCursor = memBTreeAccessor.createSearchCursor();
memBTreeAccessor.search(btreeScanCursor, btreeNullPredicate);
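
As in the inverted index, the LSMRTree flush now sizes its deleted-keys bloom filter from the configurable bloomFilterFalsePositiveRate rather than a fixed constant. The internals of BloomCalculations are not shown in this diff; the sketch below uses the standard bloom-filter sizing formulas to illustrate what such a specification works out to for a given element count and target rate:

public class BloomSizingSketch {
    public static void main(String[] args) {
        long numElements = 1_000_000L;
        double falsePositiveRate = 0.01; // the newly tunable parameter

        // m = -n * ln(p) / (ln 2)^2 bits; k = (m / n) * ln 2 hash functions
        double ln2 = Math.log(2);
        long numBits = (long) Math.ceil(-numElements * Math.log(falsePositiveRate) / (ln2 * ln2));
        int numHashes = (int) Math.round((double) numBits / numElements * ln2);

        // For 1M elements at 1% FPR: roughly 9.6M bits (~1.2 MB) and 7 hashes.
        System.out.println("bits = " + numBits + ", hashes = " + numHashes);
    }
}
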
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
index 851235e..fd36920 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
+++ b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
@@ -27,7 +27,6 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.api.io.IIOManager;
-import edu.uci.ics.hyracks.api.io.IODeviceHandle;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.AbstractLSMIndexFileManager;
@@ -56,8 +55,8 @@
public LSMRTreeFileManager(IIOManager ioManager, IFileMapProvider fileMapProvider, FileReference file,
TreeIndexFactory<? extends ITreeIndex> rtreeFactory, TreeIndexFactory<? extends ITreeIndex> btreeFactory,
- int startIODeviceIndex) {
- super(ioManager, fileMapProvider, file, null, startIODeviceIndex);
+ int ioDeviceId) {
+ super(ioManager, fileMapProvider, file, null, ioDeviceId);
this.rtreeFactory = rtreeFactory;
this.btreeFactory = btreeFactory;
}
@@ -93,17 +92,16 @@
ArrayList<ComparableFileName> allBTreeFiles = new ArrayList<ComparableFileName>();
ArrayList<ComparableFileName> allBloomFilterFiles = new ArrayList<ComparableFileName>();
- // Gather files from all IODeviceHandles.
- for (IODeviceHandle dev : ioManager.getIODevices()) {
- cleanupAndGetValidFilesInternal(dev, btreeFilter, btreeFactory, allBTreeFiles);
- HashSet<String> btreeFilesSet = new HashSet<String>();
- for (ComparableFileName cmpFileName : allBTreeFiles) {
- int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
- btreeFilesSet.add(cmpFileName.fileName.substring(0, index));
- }
- validateFiles(dev, btreeFilesSet, allRTreeFiles, rtreeFilter, rtreeFactory);
- validateFiles(dev, btreeFilesSet, allBloomFilterFiles, bloomFilterFilter, null);
+ // Gather files from the IODeviceHandle.
+ cleanupAndGetValidFilesInternal(dev, btreeFilter, btreeFactory, allBTreeFiles);
+ HashSet<String> btreeFilesSet = new HashSet<String>();
+ for (ComparableFileName cmpFileName : allBTreeFiles) {
+ int index = cmpFileName.fileName.lastIndexOf(SPLIT_STRING);
+ btreeFilesSet.add(cmpFileName.fileName.substring(0, index));
}
+ validateFiles(dev, btreeFilesSet, allRTreeFiles, rtreeFilter, rtreeFactory);
+ validateFiles(dev, btreeFilesSet, allBloomFilterFiles, bloomFilterFilter, null);
+
// Sanity check.
if (allRTreeFiles.size() != allBTreeFiles.size() || allBTreeFiles.size() != allBloomFilterFiles.size()) {
throw new HyracksDataException(
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java
index be9073e..a3a8ca2 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java
+++ b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuples.java
@@ -81,7 +81,7 @@
super(memBufferCache, memFreePageManager, rtreeInteriorFrameFactory, rtreeLeafFrameFactory,
btreeInteriorFrameFactory, btreeLeafFrameFactory, fileManager, diskRTreeFactory,
new LSMRTreeWithAntiMatterTuplesComponentFactory(diskRTreeFactory), diskFileMapProvider, fieldCount,
- rtreeCmpFactories, btreeCmpFactories, linearizer, comparatorFields, linearizerArray, mergePolicy,
+ rtreeCmpFactories, btreeCmpFactories, linearizer, comparatorFields, linearizerArray, 0, mergePolicy,
opTrackerFactory, ioScheduler, ioOpCallbackProvider);
bulkLoaComponentFactory = new LSMRTreeWithAntiMatterTuplesComponentFactory(bulkLoadRTreeFactory);
this.bTreeTupleSorter = null;
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesFileManager.java b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesFileManager.java
index 6ddf766..872d152 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesFileManager.java
+++ b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeWithAntiMatterTuplesFileManager.java
@@ -25,7 +25,7 @@
public class LSMRTreeWithAntiMatterTuplesFileManager extends AbstractLSMIndexFileManager {
public LSMRTreeWithAntiMatterTuplesFileManager(IIOManager ioManager, IFileMapProvider fileMapProvider,
- FileReference file, TreeIndexFactory<? extends ITreeIndex> rtreeFactory, int startIODeviceIndex) {
- super(ioManager, fileMapProvider, file, rtreeFactory, startIODeviceIndex);
+ FileReference file, TreeIndexFactory<? extends ITreeIndex> rtreeFactory, int ioDeviceId) {
+ super(ioManager, fileMapProvider, file, rtreeFactory, ioDeviceId);
}
}
diff --git a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
index 6c9fce6..d299bfe 100644
--- a/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
+++ b/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
@@ -65,22 +65,10 @@
IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
- ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
- ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
- ILinearizeComparatorFactory linearizeCmpFactory) throws TreeIndexException {
- return createLSMTree(memBufferCache, memFreePageManager, ioManager, file, diskBufferCache, diskFileMapProvider,
- typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType, mergePolicy,
- opTrackerFactory, ioScheduler, ioOpCallbackProvider, linearizeCmpFactory, 0);
- }
-
- public static LSMRTree createLSMTree(IInMemoryBufferCache memBufferCache,
- IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
- IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
- IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
- IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
- ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
- ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
- ILinearizeComparatorFactory linearizeCmpFactory, int startIODeviceIndex) throws TreeIndexException {
+ double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
+ ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
+ ILSMIOOperationCallbackProvider ioOpCallbackProvider, ILinearizeComparatorFactory linearizeCmpFactory,
+ int ioDeviceId) throws TreeIndexException {
LSMTypeAwareTupleWriterFactory rtreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
LSMTypeAwareTupleWriterFactory btreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
@@ -114,12 +102,12 @@
bloomFilterKeyFields);
ILSMIndexFileManager fileNameManager = new LSMRTreeFileManager(ioManager, diskFileMapProvider, file,
- diskRTreeFactory, diskBTreeFactory, startIODeviceIndex);
+ diskRTreeFactory, diskBTreeFactory, ioDeviceId);
LSMRTree lsmTree = new LSMRTree(memBufferCache, memFreePageManager, rtreeInteriorFrameFactory,
rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
- diskRTreeFactory, diskBTreeFactory, bloomFilterFactory, diskFileMapProvider, typeTraits.length,
- rtreeCmpFactories, btreeCmpFactories, linearizeCmpFactory, comparatorFields, linearizerArray,
- mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
+ diskRTreeFactory, diskBTreeFactory, bloomFilterFactory, bloomFilterFalsePositiveRate,
+ diskFileMapProvider, typeTraits.length, rtreeCmpFactories, btreeCmpFactories, linearizeCmpFactory,
+ comparatorFields, linearizerArray, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider);
return lsmTree;
}
@@ -130,21 +118,7 @@
IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
- ILinearizeComparatorFactory linearizerCmpFactory) throws TreeIndexException {
- return createLSMTreeWithAntiMatterTuples(memBufferCache, memFreePageManager, ioManager, file, diskBufferCache,
- diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories,
- rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider,
- linearizerCmpFactory, 0);
- }
-
- public static LSMRTreeWithAntiMatterTuples createLSMTreeWithAntiMatterTuples(IInMemoryBufferCache memBufferCache,
- IInMemoryFreePageManager memFreePageManager, IIOManager ioManager, FileReference file,
- IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
- IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
- IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
- ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
- ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider,
- ILinearizeComparatorFactory linearizerCmpFactory, int startIODeviceIndex) throws TreeIndexException {
+ ILinearizeComparatorFactory linearizerCmpFactory, int ioDeviceId) throws TreeIndexException {
LSMRTreeTupleWriterFactory rtreeTupleWriterFactory = new LSMRTreeTupleWriterFactory(typeTraits, false);
LSMRTreeTupleWriterFactory btreeTupleWriterFactory = new LSMRTreeTupleWriterFactory(typeTraits, true);
@@ -181,7 +155,7 @@
btreeCmpFactories[btreeCmpFactories.length - 1] };
ILSMIndexFileManager fileNameManager = new LSMRTreeWithAntiMatterTuplesFileManager(ioManager,
- diskFileMapProvider, file, diskRTreeFactory, startIODeviceIndex);
+ diskFileMapProvider, file, diskRTreeFactory, ioDeviceId);
LSMRTreeWithAntiMatterTuples lsmTree = new LSMRTreeWithAntiMatterTuples(memBufferCache, memFreePageManager,
rtreeInteriorFrameFactory, rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory,
fileNameManager, diskRTreeFactory, bulkLoadRTreeFactory, diskFileMapProvider, typeTraits.length,
diff --git a/hyracks/hyracks-storage-am-rtree/pom.xml b/hyracks/hyracks-storage-am-rtree/pom.xml
index d73ce00..af5e922 100644
--- a/hyracks/hyracks-storage-am-rtree/pom.xml
+++ b/hyracks/hyracks-storage-am-rtree/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,21 +27,21 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
index 5ab9632..f197853 100644
--- a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
@@ -48,6 +48,11 @@
}
@Override
+ public int getBytesRequriedToWriteTuple(ITupleReference tuple) {
+ return tupleWriter.bytesRequired(tuple) + childPtrSize + slotManager.getSlotSize();
+ }
+
+ @Override
public int findBestChild(ITupleReference tuple, MultiComparator cmp) {
int bestChild = rtreePolicy.findBestChildPosition(this, tuple, frameTuple, cmp);
frameTuple.resetByTupleIndex(this, bestChild);
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
index d52ef16..4057778 100644
--- a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
@@ -30,6 +30,11 @@
}
@Override
+ public int getBytesRequriedToWriteTuple(ITupleReference tuple) {
+ return tupleWriter.bytesRequired(tuple) + slotManager.getSlotSize();
+ }
+
+ @Override
public ITreeIndexTupleReference createTupleReference() {
return tupleWriter.createTupleReference();
}
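The two getBytesRequriedToWriteTuple implementations added above account for a tuple's full on-page footprint: interior frames add a child page pointer on top of the tuple bytes and slot entry, while leaf frames add only the slot entry. A minimal sketch with illustrative sizes (the real values come from the tuple writer and slot manager):

    int tupleBytes = 24;   // stand-in for tupleWriter.bytesRequired(tuple)
    int childPtrSize = 4;  // interior frames store a child page pointer per tuple
    int slotSize = 4;      // one slot-array entry per tuple
    int interiorBytes = tupleBytes + childPtrSize + slotSize; // 32
    int leafBytes = tupleBytes + slotSize;                    // 28
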
diff --git a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
index c12dc50..48fb6f1 100644
--- a/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
+++ b/hyracks/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
@@ -61,12 +61,18 @@
// Global node sequence number used for the concurrency control protocol
private final AtomicLong globalNsn;
+ private final int maxTupleSize;
+
public RTree(IBufferCache bufferCache, IFileMapProvider fileMapProvider, IFreePageManager freePageManager,
ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
IBinaryComparatorFactory[] cmpFactories, int fieldCount, FileReference file) {
super(bufferCache, fileMapProvider, freePageManager, interiorFrameFactory, leafFrameFactory, cmpFactories,
fieldCount, file);
globalNsn = new AtomicLong();
+ ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
+ ITreeIndexFrame interiorFrame = interiorFrameFactory.createFrame();
+ maxTupleSize = Math.min(leafFrame.getMaxTupleSize(bufferCache.getPageSize()),
+ interiorFrame.getMaxTupleSize(bufferCache.getPageSize()));
}
private long incrementGlobalNsn() {
@@ -147,6 +153,12 @@
private void insert(ITupleReference tuple, IIndexOperationContext ictx) throws HyracksDataException,
TreeIndexException {
RTreeOpContext ctx = (RTreeOpContext) ictx;
+ int tupleSize = Math.max(ctx.leafFrame.getBytesRequriedToWriteTuple(tuple),
+ ctx.interiorFrame.getBytesRequriedToWriteTuple(tuple));
+ if (tupleSize > maxTupleSize) {
+ throw new TreeIndexException("Record size (" + tupleSize + ") larger than maximum acceptable record size ("
+ + maxTupleSize + ")");
+ }
ctx.reset();
ctx.setTuple(tuple);
ctx.splitKey.reset();
@@ -614,7 +626,7 @@
ctx.splitKey.reset();
ctx.splitKey.getLeftTuple().setFieldCount(cmpFactories.length);
- // We delete the first matching tuple (including the payload data.
+ // We delete the first matching tuple (including the payload data).
// We don't update the MBRs of the parents after deleting the record.
int tupleIndex = findTupleToDelete(ctx);
@@ -870,8 +882,15 @@
}
@Override
- public void add(ITupleReference tuple) throws HyracksDataException {
+ public void add(ITupleReference tuple) throws IndexException, HyracksDataException {
try {
+ int tupleSize = Math.max(leafFrame.getBytesRequriedToWriteTuple(tuple),
+ interiorFrame.getBytesRequriedToWriteTuple(tuple));
+ if (tupleSize > maxTupleSize) {
+ throw new TreeIndexException("Space required for record (" + tupleSize
+ + ") larger than maximum acceptable size (" + maxTupleSize + ")");
+ }
+
NodeFrontier leafFrontier = nodeFrontiers.get(0);
int spaceNeeded = tupleWriter.bytesRequired(tuple) + slotSize;
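Both the insert path and the bulk loader above now reject records that cannot fit in a page, using the stricter of the leaf and interior frame limits computed once in the constructor. A minimal standalone sketch of the guard, with an illustrative page budget and a generic exception as a stand-in for TreeIndexException:

    final int maxTupleSize = 128;  // stand-in for min(leaf, interior).getMaxTupleSize(pageSize)
    int tupleSize = 200;           // stand-in for getBytesRequriedToWriteTuple(tuple)
    if (tupleSize > maxTupleSize) {
        throw new IllegalArgumentException("Record size (" + tupleSize
                + ") is larger than the maximum acceptable record size (" + maxTupleSize + ")");
    }
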
diff --git a/hyracks/hyracks-storage-common/pom.xml b/hyracks/hyracks-storage-common/pom.xml
index bde1ac4..3ee262d 100644
--- a/hyracks/hyracks-storage-common/pom.xml
+++ b/hyracks/hyracks-storage-common/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,7 +27,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceRepository.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceRepository.java
index ab9ec41..36aa088 100644
--- a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceRepository.java
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/ILocalResourceRepository.java
@@ -19,16 +19,16 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
public interface ILocalResourceRepository {
-
+
public LocalResource getResourceById(long id) throws HyracksDataException;
public LocalResource getResourceByName(String name) throws HyracksDataException;
- public void insert(LocalResource resource) throws HyracksDataException;
+ public void insert(LocalResource resource, int ioDeviceId) throws HyracksDataException;
- public void deleteResourceById(long id) throws HyracksDataException;
+ public void deleteResourceById(long id, int ioDeviceId) throws HyracksDataException;
- public void deleteResourceByName(String name) throws HyracksDataException;
+ public void deleteResourceByName(String name, int ioDeviceId) throws HyracksDataException;
public List<LocalResource> getAllResources() throws HyracksDataException;
}
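Every mutating method on ILocalResourceRepository now threads an explicit ioDeviceId so persistent implementations can address the right device; the transient, in-memory implementation changed next simply ignores the extra argument. A hypothetical caller, assuming repository and resource already exist; device 0 is an arbitrary choice:

    int ioDeviceId = 0;
    repository.insert(resource, ioDeviceId);
    // ... later, remove it from the same device:
    repository.deleteResourceById(resource.getResourceId(), ioDeviceId);
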
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceRepository.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceRepository.java
index 55bd807..f853a36 100644
--- a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceRepository.java
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/TransientLocalResourceRepository.java
@@ -37,7 +37,7 @@
}
@Override
- public synchronized void insert(LocalResource resource) throws HyracksDataException {
+ public synchronized void insert(LocalResource resource, int ioDeviceId) throws HyracksDataException {
long id = resource.getResourceId();
if (id2ResourceMap.containsKey(id)) {
@@ -48,7 +48,7 @@
}
@Override
- public synchronized void deleteResourceById(long id) throws HyracksDataException {
+ public synchronized void deleteResourceById(long id, int ioDeviceId) throws HyracksDataException {
LocalResource resource = id2ResourceMap.get(id);
if (resource == null) {
throw new HyracksDataException("Resource doesn't exist");
@@ -58,7 +58,7 @@
}
@Override
- public synchronized void deleteResourceByName(String name) throws HyracksDataException {
+ public synchronized void deleteResourceByName(String name, int ioDeviceId) throws HyracksDataException {
LocalResource resource = name2ResourceMap.get(name);
if (resource == null) {
throw new HyracksDataException("Resource doesn't exist");
diff --git a/hyracks/hyracks-test-support/pom.xml b/hyracks/hyracks-test-support/pom.xml
index 623edc4..217a9a4 100644
--- a/hyracks/hyracks-test-support/pom.xml
+++ b/hyracks/hyracks-test-support/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -27,33 +27,33 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-rtree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/config/AccessMethodTestsConfig.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/config/AccessMethodTestsConfig.java
index f962200..73176f3 100644
--- a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/config/AccessMethodTestsConfig.java
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/config/AccessMethodTestsConfig.java
@@ -53,6 +53,7 @@
public static final int LSM_RTREE_MEM_PAGE_SIZE = 512;
public static final int LSM_RTREE_MEM_NUM_PAGES = 1000;
public static final int LSM_RTREE_HYRACKS_FRAME_SIZE = 128;
+ public static final double LSM_RTREE_BLOOMFILTER_FALSE_POSITIVE_RATE = 0.01;
// Mem configuration for BTree.
public static final int BTREE_PAGE_SIZE = 256;
@@ -67,6 +68,7 @@
public static final int LSM_BTREE_MEM_PAGE_SIZE = 256;
public static final int LSM_BTREE_MEM_NUM_PAGES = 100;
public static final int LSM_BTREE_HYRACKS_FRAME_SIZE = 128;
+ public static final double LSM_BTREE_BLOOMFILTER_FALSE_POSITIVE_RATE = 0.01;
// Mem configuration for Inverted Index.
public static final int LSM_INVINDEX_DISK_PAGE_SIZE = 1024;
@@ -75,6 +77,7 @@
public static final int LSM_INVINDEX_MEM_PAGE_SIZE = 1024;
public static final int LSM_INVINDEX_MEM_NUM_PAGES = 100;
public static final int LSM_INVINDEX_HYRACKS_FRAME_SIZE = 32768;
+ public static final double LSM_INVINDEX_BLOOMFILTER_FALSE_POSITIVE_RATE = 0.01;
// Test parameters.
public static final int LSM_INVINDEX_NUM_DOCS_TO_INSERT = 100;
// Used for full-fledged search test.
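The three new constants set a 1% bloom filter false positive target for the LSM test configurations. As a back-of-the-envelope check (not part of the patch), the standard bloom filter sizing formulas give roughly 9.6 bits and 7 hash functions per key at that rate:

    double p = 0.01;                                                // target false positive rate
    double bitsPerKey = -Math.log(p) / (Math.log(2) * Math.log(2)); // ~9.59
    long hashCount = Math.round(bitsPerKey * Math.log(2));          // ~7
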
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
index f93e9b6..fdb4341 100644
--- a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
@@ -225,7 +225,10 @@
int p1y = rnd.nextInt();
int p2x = rnd.nextInt();
int p2y = rnd.nextInt();
- String data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
+ String data = "";
+ for (int i = 0; i < 210; i++) {
+ data += "X";
+ }
TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
Math.max(p1y, p2y), data);
indexAccessor.insert(tuple);
@@ -261,7 +264,10 @@
p1y = rnd.nextInt();
p2x = rnd.nextInt();
p2y = rnd.nextInt();
- data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
+ data = "";
+ for (int i = 0; i < 210; i++) {
+ data += "X";
+ }
TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
Math.max(p1y, p2y), data);
indexAccessor.insert(tuple);
@@ -270,7 +276,10 @@
p1y = rnd.nextInt();
p2x = rnd.nextInt();
p2y = rnd.nextInt();
- data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
+ data = "";
+ for (int i = 0; i < 210; i++) {
+ data += "X";
+ }
TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
Math.max(p1y, p2y), data);
indexAccessor.insert(tuple);
@@ -340,7 +349,10 @@
int p1y = rnd.nextInt();
int p2x = rnd.nextInt();
int p2y = rnd.nextInt();
- String data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
+ String data = "";
+ for (int i = 0; i < 210; i++) {
+ data += "X";
+ }
TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
Math.max(p1y, p2y), data);
indexAccessor.insert(tuple);
@@ -349,7 +361,10 @@
p1y = rnd.nextInt();
p2x = rnd.nextInt();
p2y = rnd.nextInt();
- data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
+ data = "";
+ for (int i = 0; i < 210; i++) {
+ data += "X";
+ }
TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
Math.max(p1y, p2y), data);
indexAccessor.insert(tuple);
@@ -376,7 +391,10 @@
p1y = rnd.nextInt();
p2x = rnd.nextInt();
p2y = rnd.nextInt();
- data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
+ data = "";
+ for (int i = 0; i < 210; i++) {
+ data += "X";
+ }
TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
Math.max(p1y, p2y), data);
indexAccessor.insert(tuple);
@@ -385,7 +403,10 @@
p1y = rnd.nextInt();
p2x = rnd.nextInt();
p2y = rnd.nextInt();
- data = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
+ data = "";
+ for (int i = 0; i < 210; i++) {
+ data += "X";
+ }
TupleUtils.createTuple(tb, tuple, fieldSerdes, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
Math.max(p1y, p2y), data);
indexAccessor.insert(tuple);
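The example test above replaces each hard-coded oversized payload with a loop that builds a 210-character string, keeping each record within the page budget enforced by the new size check. The tests keep the simple loop-and-concatenate form; an equivalent, more idiomatic construction would be:

    StringBuilder sb = new StringBuilder(210);
    for (int i = 0; i < 210; i++) {
        sb.append('X');
    }
    String data = sb.toString();
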
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestJobletContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestJobletContext.java
index 7612db9..8285efe 100644
--- a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestJobletContext.java
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestJobletContext.java
@@ -92,4 +92,18 @@
public Object getGlobalJobData() {
return null;
}
+
+ @Override
+ public Class<?> loadClass(String className) {
+ try {
+ return Class.forName(className);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public ClassLoader getClassLoader() {
+ return this.getClass().getClassLoader();
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestNCApplicationContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestNCApplicationContext.java
index 0c64d7e..285ab1f 100644
--- a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestNCApplicationContext.java
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestNCApplicationContext.java
@@ -18,6 +18,7 @@
import edu.uci.ics.hyracks.api.application.INCApplicationContext;
import edu.uci.ics.hyracks.api.context.IHyracksRootContext;
+import edu.uci.ics.hyracks.api.job.IJobSerializerDeserializerContainer;
import edu.uci.ics.hyracks.api.messages.IMessageBroker;
public class TestNCApplicationContext implements INCApplicationContext {
@@ -68,4 +69,10 @@
// TODO Auto-generated method stub
return null;
}
+
+ @Override
+ public IJobSerializerDeserializerContainer getJobSerializerDeserializerContainer() {
+ // TODO Auto-generated method stub
+ return null;
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTaskContext.java b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTaskContext.java
index 0ca93b2..8311ebd 100644
--- a/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTaskContext.java
+++ b/hyracks/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTaskContext.java
@@ -21,6 +21,7 @@
import edu.uci.ics.hyracks.api.dataflow.TaskAttemptId;
import edu.uci.ics.hyracks.api.dataflow.state.IStateObject;
import edu.uci.ics.hyracks.api.dataset.IDatasetPartitionManager;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.io.FileReference;
@@ -107,7 +108,7 @@
}
@Override
- public void sendApplicationMessageToCC(byte[] message, String nodeId) throws Exception {
+ public void sendApplicationMessageToCC(byte[] message, DeploymentId deploymentId, String nodeId) throws Exception {
// TODO Auto-generated method stub
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
index edf9d08..aa2238f 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-bloomfilter-test/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-tests</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -32,14 +32,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-bloomfilter</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-test-support</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>test</scope>
</dependency>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
index bfb2d61..8e7df1d 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-tests</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -34,14 +34,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-test-support</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>test</scope>
</dependency>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
index f687212..35ab780 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-tests</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -25,20 +25,20 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-test-support</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
</dependencies>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeBulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeBulkLoadTest.java
index 4bd1910..a7eee57 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeBulkLoadTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeBulkLoadTest.java
@@ -53,9 +53,9 @@
BTreeLeafFrameType leafType) throws Exception {
return LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
- harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getMergePolicy(),
+ harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeDeleteTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeDeleteTest.java
index 069faad..a648ff6 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeDeleteTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeDeleteTest.java
@@ -53,9 +53,9 @@
BTreeLeafFrameType leafType) throws Exception {
return LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
- harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getMergePolicy(),
- harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getBoomFilterFalsePositiveRate(),
+ harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
index 539ed3e..b5b0b76 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeExamplesTest.java
@@ -37,8 +37,9 @@
return LSMBTreeUtils.createLSMTree(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
harness.getDiskFileMapProvider(), typeTraits, cmpFactories, bloomFilterKeyFields,
- harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Before
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeInsertTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeInsertTest.java
index f17e3c8..875bbd4 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeInsertTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeInsertTest.java
@@ -53,9 +53,9 @@
BTreeLeafFrameType leafType) throws Exception {
return LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
- harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getMergePolicy(),
- harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getBoomFilterFalsePositiveRate(),
+ harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeLifecycleTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeLifecycleTest.java
index 24d1f10..43d38bc 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeLifecycleTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeLifecycleTest.java
@@ -44,9 +44,10 @@
harness.setUp();
testCtx = LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
- harness.getDiskFileMapProvider(), fieldSerdes, fieldSerdes.length, harness.getMergePolicy(),
+ harness.getDiskFileMapProvider(), fieldSerdes, fieldSerdes.length,
+ harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
index = testCtx.getIndex();
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMergeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMergeTest.java
index da36c79..0ea2af2 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMergeTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMergeTest.java
@@ -52,9 +52,9 @@
BTreeLeafFrameType leafType) throws Exception {
return LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
- harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getMergePolicy(),
- harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getBoomFilterFalsePositiveRate(),
+ harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeModificationOperationCallbackTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeModificationOperationCallbackTest.java
index 648e70f..901a31d 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeModificationOperationCallbackTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeModificationOperationCallbackTest.java
@@ -46,8 +46,9 @@
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
harness.getDiskFileMapProvider(), SerdeUtils.serdesToTypeTraits(keySerdes),
SerdeUtils.serdesToComparatorFactories(keySerdes, keySerdes.length), bloomFilterKeyFields,
- harness.getMergePolicy(), NoOpOperationTrackerFactory.INSTANCE, harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ NoOpOperationTrackerFactory.INSTANCE, harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMultiBulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMultiBulkLoadTest.java
index 3a99c16..ac8290e 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMultiBulkLoadTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeMultiBulkLoadTest.java
@@ -54,9 +54,9 @@
BTreeLeafFrameType leafType) throws Exception {
return LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
- harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getMergePolicy(),
- harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getBoomFilterFalsePositiveRate(),
+ harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeSearchOperationCallbackTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeSearchOperationCallbackTest.java
index 41d7ae8..33e1641 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeSearchOperationCallbackTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeSearchOperationCallbackTest.java
@@ -39,8 +39,9 @@
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
harness.getDiskFileMapProvider(), SerdeUtils.serdesToTypeTraits(keySerdes),
SerdeUtils.serdesToComparatorFactories(keySerdes, keySerdes.length), bloomFilterKeyFields,
- harness.getMergePolicy(), NoOpOperationTrackerFactory.INSTANCE, harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ NoOpOperationTrackerFactory.INSTANCE, harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeUpdateTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeUpdateTest.java
index ca89512..cc2db11 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeUpdateTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/LSMBTreeUpdateTest.java
@@ -53,9 +53,9 @@
BTreeLeafFrameType leafType) throws Exception {
return LSMBTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
- harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getMergePolicy(),
- harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getDiskFileMapProvider(), fieldSerdes, numKeys, harness.getBoomFilterFalsePositiveRate(),
+ harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
index c494448..d56ff5b 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
@@ -53,8 +53,9 @@
return LSMBTreeUtils.createLSMTree(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
harness.getDiskFileMapProvider(), typeTraits, cmpFactories, bloomFilterKeyFields,
- harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
index 5d2185a..7e54003 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/perf/LSMTreeRunner.java
@@ -57,6 +57,7 @@
protected IHyracksTaskContext ctx;
protected IOManager ioManager;
+ protected int ioDeviceId;
protected IBufferCache bufferCache;
protected int lsmtreeFileId;
@@ -74,8 +75,8 @@
private final int onDiskNumPages;
public LSMTreeRunner(int numBatches, int inMemPageSize, int inMemNumPages, int onDiskPageSize, int onDiskNumPages,
- ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories, int[] bloomFilterKeyFields)
- throws BTreeException, HyracksException {
+ ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories, int[] bloomFilterKeyFields,
+ double bloomFilterFalsePositiveRate) throws BTreeException, HyracksException {
this.numBatches = numBatches;
this.onDiskPageSize = onDiskPageSize;
@@ -88,6 +89,7 @@
TestStorageManagerComponentHolder.init(this.onDiskPageSize, this.onDiskNumPages, MAX_OPEN_FILES);
bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
ioManager = TestStorageManagerComponentHolder.getIOManager();
+ ioDeviceId = 0;
IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
IInMemoryBufferCache memBufferCache = new InMemoryBufferCache(new HeapBufferAllocator(), inMemPageSize,
@@ -96,8 +98,9 @@
new LIFOMetaDataFrameFactory());
this.ioScheduler = SynchronousScheduler.INSTANCE;
lsmtree = LSMBTreeUtils.createLSMTree(memBufferCache, memFreePageManager, ioManager, file, bufferCache, fmp,
- typeTraits, cmpFactories, bloomFilterKeyFields, NoMergePolicy.INSTANCE,
- ThreadCountingOperationTrackerFactory.INSTANCE, ioScheduler, NoOpIOOperationCallback.INSTANCE);
+ typeTraits, cmpFactories, bloomFilterKeyFields, bloomFilterFalsePositiveRate, NoMergePolicy.INSTANCE,
+ ThreadCountingOperationTrackerFactory.INSTANCE, ioScheduler, NoOpIOOperationCallback.INSTANCE,
+ ioDeviceId);
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestContext.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestContext.java
index f790fde..a830dc0 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestContext.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestContext.java
@@ -66,9 +66,9 @@
public static LSMBTreeTestContext create(IInMemoryBufferCache memBufferCache,
IInMemoryFreePageManager memFreePageManager, IOManager ioManager, FileReference file,
IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ISerializerDeserializer[] fieldSerdes,
- int numKeyFields, ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
- ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
- throws Exception {
+ int numKeyFields, double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
+ ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
+ ILSMIOOperationCallbackProvider ioOpCallbackProvider, int ioDeviceId) throws Exception {
ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeyFields);
int[] bloomFilterKeyFields = new int[numKeyFields];
@@ -76,8 +76,9 @@
bloomFilterKeyFields[i] = i;
}
LSMBTree lsmTree = LSMBTreeUtils.createLSMTree(memBufferCache, memFreePageManager, ioManager, file,
- diskBufferCache, diskFileMapProvider, typeTraits, cmpFactories, bloomFilterKeyFields, mergePolicy,
- opTrackerFactory, ioScheduler, ioOpCallbackProvider);
+ diskBufferCache, diskFileMapProvider, typeTraits, cmpFactories, bloomFilterKeyFields,
+ bloomFilterFalsePositiveRate, mergePolicy, opTrackerFactory, ioScheduler, ioOpCallbackProvider,
+ ioDeviceId);
LSMBTreeTestContext testCtx = new LSMBTreeTestContext(fieldSerdes, lsmTree);
return testCtx;
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
index 9128607..066f57b 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/util/LSMBTreeTestHarness.java
@@ -63,8 +63,10 @@
protected final int memPageSize;
protected final int memNumPages;
protected final int hyracksFrameSize;
+ protected final double bloomFilterFalsePositiveRate;
protected IOManager ioManager;
+ protected int ioDeviceId;
protected IBufferCache diskBufferCache;
protected IFileMapProvider diskFileMapProvider;
protected IInMemoryBufferCache memBufferCache;
@@ -88,6 +90,7 @@
this.memPageSize = AccessMethodTestsConfig.LSM_BTREE_MEM_PAGE_SIZE;
this.memNumPages = AccessMethodTestsConfig.LSM_BTREE_MEM_NUM_PAGES;
this.hyracksFrameSize = AccessMethodTestsConfig.LSM_BTREE_HYRACKS_FRAME_SIZE;
+ this.bloomFilterFalsePositiveRate = AccessMethodTestsConfig.LSM_BTREE_BLOOMFILTER_FALSE_POSITIVE_RATE;
this.ioScheduler = SynchronousScheduler.INSTANCE;
this.mergePolicy = NoMergePolicy.INSTANCE;
this.opTrackerFactory = ThreadCountingOperationTrackerFactory.INSTANCE;
@@ -95,13 +98,14 @@
}
public LSMBTreeTestHarness(int diskPageSize, int diskNumPages, int diskMaxOpenFiles, int memPageSize,
- int memNumPages, int hyracksFrameSize) {
+ int memNumPages, int hyracksFrameSize, double bloomFilterFalsePositiveRate) {
this.diskPageSize = diskPageSize;
this.diskNumPages = diskNumPages;
this.diskMaxOpenFiles = diskMaxOpenFiles;
this.memPageSize = memPageSize;
this.memNumPages = memNumPages;
this.hyracksFrameSize = hyracksFrameSize;
+ this.bloomFilterFalsePositiveRate = bloomFilterFalsePositiveRate;
this.ioScheduler = SynchronousScheduler.INSTANCE;
this.mergePolicy = NoMergePolicy.INSTANCE;
this.opTrackerFactory = ThreadCountingOperationTrackerFactory.INSTANCE;
@@ -118,6 +122,7 @@
new TransientFileMapManager());
memFreePageManager = new InMemoryFreePageManager(memNumPages, new LIFOMetaDataFrameFactory());
ioManager = TestStorageManagerComponentHolder.getIOManager();
+ ioDeviceId = 0;
rnd.setSeed(RANDOM_SEED);
}
@@ -169,6 +174,10 @@
return ioManager;
}
+ public int getIODeviceId() {
+ return ioDeviceId;
+ }
+
public IBufferCache getDiskBufferCache() {
return diskBufferCache;
}
@@ -181,6 +190,10 @@
return memBufferCache;
}
+ public double getBoomFilterFalsePositiveRate() {
+ return bloomFilterFalsePositiveRate;
+ }
+
public IInMemoryFreePageManager getMemFreePageManager() {
return memFreePageManager;
}
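The harness constructor gains a bloomFilterFalsePositiveRate parameter alongside the existing sizing knobs, and the harness now exposes the rate and the I/O device id to the tests. A sketch of constructing it with the widened signature; the numeric values are illustrative except the 0.01 rate, which mirrors AccessMethodTestsConfig:

    LSMBTreeTestHarness harness = new LSMBTreeTestHarness(
            256,   // diskPageSize
            1000,  // diskNumPages
            10,    // diskMaxOpenFiles
            256,   // memPageSize
            100,   // memNumPages
            128,   // hyracksFrameSize
            0.01); // bloomFilterFalsePositiveRate
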
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml
index 68b432b..7508f7c 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-tests</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -25,14 +25,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-test-support</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
index 69e23bc..d41ddd4 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/DummyLSMIndexFileManager.java
@@ -32,8 +32,8 @@
public class DummyLSMIndexFileManager extends AbstractLSMIndexFileManager {
public DummyLSMIndexFileManager(IIOManager ioManager, IFileMapProvider fileMapProvider, FileReference file,
- TreeIndexFactory<? extends ITreeIndex> treeFactory) {
- super(ioManager, fileMapProvider, file, treeFactory, 0);
+ TreeIndexFactory<? extends ITreeIndex> treeFactory, int ioDeviceId) {
+ super(ioManager, fileMapProvider, file, treeFactory, ioDeviceId);
}
protected void cleanupAndGetValidFilesInternal(IODeviceHandle dev, FilenameFilter filter,
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
index 161f4ce..0c9e658 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMIndexFileManagerTest.java
@@ -48,6 +48,7 @@
private static final int DEFAULT_PAGE_SIZE = 256;
private static final int DEFAULT_NUM_PAGES = 100;
private static final int DEFAULT_MAX_OPEN_FILES = 10;
+ private static final int DEFAULT_IO_DEVICE_ID = 0;
protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
protected final static String sep = System.getProperty("file.separator");
protected IOManager ioManager;
@@ -74,7 +75,7 @@
public void sortOrderTest(boolean testFlushFileName) throws InterruptedException, HyracksDataException {
ILSMIndexFileManager fileManager = new DummyLSMIndexFileManager(ioManager, fileMapProvider, file,
- new DummyTreeFactory());
+ new DummyTreeFactory(), DEFAULT_IO_DEVICE_ID);
LinkedList<String> fileNames = new LinkedList<String>();
int numFileNames = 100;
@@ -115,7 +116,7 @@
public void cleanInvalidFilesTest(IOManager ioManager) throws InterruptedException, IOException, IndexException {
ILSMIndexFileManager fileManager = new DummyLSMIndexFileManager(ioManager, fileMapProvider, file,
- new DummyTreeFactory());
+ new DummyTreeFactory(), DEFAULT_IO_DEVICE_ID);
fileManager.createDirs();
List<FileReference> flushFiles = new ArrayList<FileReference>();
@@ -187,20 +188,18 @@
.getFile().getName());
}
- // Make sure invalid files were removed from all IODevices.
+ // Make sure invalid files were removed from the IODevices.
ArrayList<String> remainingFiles = new ArrayList<String>();
- for (IODeviceHandle dev : ioManager.getIODevices()) {
- File dir = new File(dev.getPath(), baseDir);
- FilenameFilter filter = new FilenameFilter() {
- public boolean accept(File dir, String name) {
- return !name.startsWith(".");
- }
- };
- String[] files = dir.list(filter);
- for (String file : files) {
- File f = new File(file);
- remainingFiles.add(f.getName());
+ File dir = new File(ioManager.getIODevices().get(DEFAULT_IO_DEVICE_ID).getPath(), baseDir);
+ FilenameFilter filter = new FilenameFilter() {
+ public boolean accept(File dir, String name) {
+ return !name.startsWith(".");
}
+ };
+ String[] files = dir.list(filter);
+ for (String file : files) {
+ File f = new File(file);
+ remainingFiles.add(f.getName());
}
Collections.sort(remainingFiles, fileManager.getFileNameComparator());
@@ -218,34 +217,19 @@
cleanDirs(singleDeviceIOManager);
}
- @Test
- public void twoIODevicesTest() throws InterruptedException, IOException, IndexException {
- IOManager twoDevicesIOManager = createIOManager(2);
- cleanInvalidFilesTest(twoDevicesIOManager);
- cleanDirs(twoDevicesIOManager);
- }
-
- @Test
- public void fourIODevicesTest() throws InterruptedException, IOException, IndexException {
- IOManager fourDevicesIOManager = createIOManager(4);
- cleanInvalidFilesTest(fourDevicesIOManager);
- cleanDirs(fourDevicesIOManager);
- }
-
private void cleanDirs(IOManager ioManager) {
- for (IODeviceHandle dev : ioManager.getIODevices()) {
- File dir = new File(dev.getPath(), baseDir);
- FilenameFilter filter = new FilenameFilter() {
- public boolean accept(File dir, String name) {
- return !name.startsWith(".");
- }
- };
- String[] files = dir.list(filter);
- for (String file : files) {
- File f = new File(file);
- f.delete();
+ File dir = new File(ioManager.getIODevices().get(DEFAULT_IO_DEVICE_ID).getPath(), baseDir);
+ FilenameFilter filter = new FilenameFilter() {
+ public boolean accept(File dir, String name) {
+ return !name.startsWith(".");
}
+ };
+ String[] files = dir.list(filter);
+ for (String file : files) {
+ File f = new File(file);
+ f.delete();
}
+
}
private IOManager createIOManager(int numDevices) throws HyracksException {
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
index 273b90f..be0bb6f 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/pom.xml
@@ -5,7 +5,7 @@
<parent>
<artifactId>hyracks-tests</artifactId>
<groupId>edu.uci.ics.hyracks</groupId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
@@ -26,21 +26,21 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-test-support</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>test</scope>
</dependency>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/LSMInvertedIndexTestHarness.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/LSMInvertedIndexTestHarness.java
index 5be1d6a..bf2d5e8 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/LSMInvertedIndexTestHarness.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/common/LSMInvertedIndexTestHarness.java
@@ -57,8 +57,10 @@
protected final int memPageSize;
protected final int memNumPages;
protected final int hyracksFrameSize;
+ protected final double bloomFilterFalsePositiveRate;
protected IOManager ioManager;
+ protected int ioDeviceId;
protected IBufferCache diskBufferCache;
protected IFileMapProvider diskFileMapProvider;
protected IInMemoryBufferCache memBufferCache;
@@ -84,6 +86,7 @@
this.memPageSize = AccessMethodTestsConfig.LSM_INVINDEX_MEM_PAGE_SIZE;
this.memNumPages = AccessMethodTestsConfig.LSM_INVINDEX_MEM_NUM_PAGES;
this.hyracksFrameSize = AccessMethodTestsConfig.LSM_INVINDEX_HYRACKS_FRAME_SIZE;
+ this.bloomFilterFalsePositiveRate = AccessMethodTestsConfig.LSM_INVINDEX_BLOOMFILTER_FALSE_POSITIVE_RATE;
this.ioScheduler = SynchronousScheduler.INSTANCE;
this.mergePolicy = NoMergePolicy.INSTANCE;
this.opTrackerFactory = ThreadCountingOperationTrackerFactory.INSTANCE;
@@ -91,13 +94,14 @@
}
public LSMInvertedIndexTestHarness(int diskPageSize, int diskNumPages, int diskMaxOpenFiles, int memPageSize,
- int memNumPages, int hyracksFrameSize) {
+ int memNumPages, int hyracksFrameSize, double bloomFilterFalsePositiveRate) {
this.diskPageSize = diskPageSize;
this.diskNumPages = diskNumPages;
this.diskMaxOpenFiles = diskMaxOpenFiles;
this.memPageSize = memPageSize;
this.memNumPages = memNumPages;
this.hyracksFrameSize = hyracksFrameSize;
+ this.bloomFilterFalsePositiveRate = bloomFilterFalsePositiveRate;
this.ioScheduler = SynchronousScheduler.INSTANCE;
this.mergePolicy = NoMergePolicy.INSTANCE;
this.opTrackerFactory = ThreadCountingOperationTrackerFactory.INSTANCE;
@@ -113,6 +117,7 @@
memBufferCache.open();
memFreePageManager = new DualIndexInMemoryFreePageManager(memNumPages, new LIFOMetaDataFrameFactory());
ioManager = TestStorageManagerComponentHolder.getIOManager();
+ ioDeviceId = 0;
rnd.setSeed(RANDOM_SEED);
invIndexFileRef = ioManager.getIODevices().get(0).createFileReference(onDiskDir + invIndexFileName);
}
@@ -170,6 +175,10 @@
return ioManager;
}
+ public int getIODeviceId() {
+ return ioDeviceId;
+ }
+
public IBufferCache getDiskBufferCache() {
return diskBufferCache;
}
@@ -182,6 +191,10 @@
return memBufferCache;
}
+ public double getBoomFilterFalsePositiveRate() {
+ return bloomFilterFalsePositiveRate;
+ }
+
public IInMemoryFreePageManager getMemFreePageManager() {
return memFreePageManager;
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
index 870e6d9..f955fc9 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/invertedindex/util/LSMInvertedIndexTestContext.java
@@ -146,8 +146,9 @@
harness.getMemFreePageManager(), harness.getDiskFileMapProvider(), invListTypeTraits,
invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
harness.getDiskBufferCache(), harness.getIOManager(), harness.getOnDiskDir(),
- harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
break;
}
case PARTITIONED_LSM: {
@@ -155,8 +156,9 @@
harness.getMemFreePageManager(), harness.getDiskFileMapProvider(), invListTypeTraits,
invListCmpFactories, tokenTypeTraits, tokenCmpFactories, tokenizerFactory,
harness.getDiskBufferCache(), harness.getIOManager(), harness.getOnDiskDir(),
- harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getBoomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
break;
}
default: {
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
index a39fd3a..95332a7 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/pom.xml
@@ -5,7 +5,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-tests</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -25,14 +25,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-lsm-rtree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-test-support</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeBulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeBulkLoadTest.java
index 995f18c..edd24b4 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeBulkLoadTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeBulkLoadTest.java
@@ -57,8 +57,9 @@
return LSMRTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType,
- harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getBloomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeDeleteTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeDeleteTest.java
index d72b668..360c02c 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeDeleteTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeDeleteTest.java
@@ -57,8 +57,9 @@
return LSMRTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType,
- harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getBloomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
index cba8cee..0c68fa6 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeExamplesTest.java
@@ -40,10 +40,10 @@
return LSMRTreeUtils.createLSMTree(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
harness.getDiskFileMapProvider(), typeTraits, rtreeCmpFactories, btreeCmpFactories,
- valueProviderFactories, rtreePolicyType, harness.getMergePolicy(),
- harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ valueProviderFactories, rtreePolicyType, harness.getBloomFilterFalsePositiveRate(),
+ harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
harness.getIOOperationCallbackProvider(),
- LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length));
+ LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length), harness.getIODeviceId());
}
@Before
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeInsertTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeInsertTest.java
index 96485f8..f39b139 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeInsertTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeInsertTest.java
@@ -57,8 +57,9 @@
return LSMRTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType,
- harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getBloomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeLifecycleTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeLifecycleTest.java
index e72b3ca..8bee460 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeLifecycleTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeLifecycleTest.java
@@ -55,8 +55,9 @@
testCtx = LSMRTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories, numKeys, RTreePolicyType.RTREE,
- harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getBloomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
index = testCtx.getIndex();
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTest.java
index 1d07484..5d7d31b 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMergeTest.java
@@ -56,8 +56,9 @@
return LSMRTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType,
- harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getBloomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMultiBulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMultiBulkLoadTest.java
index cc46065..8ef0a9f 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMultiBulkLoadTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeMultiBulkLoadTest.java
@@ -57,8 +57,9 @@
return LSMRTreeTestContext.create(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories, numKeys, rtreePolicyType,
- harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
- harness.getIOOperationCallbackProvider());
+ harness.getBloomFilterFalsePositiveRate(), harness.getMergePolicy(),
+ harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesBulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesBulkLoadTest.java
index 81a952d..f778aa9 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesBulkLoadTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesBulkLoadTest.java
@@ -58,7 +58,7 @@
harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories,
numKeys, rtreePolicyType, harness.getMergePolicy(), harness.getOperationTrackerFactory(),
- harness.getIOScheduler(), harness.getIOOperationCallbackProvider());
+ harness.getIOScheduler(), harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesDeleteTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesDeleteTest.java
index 1ee92d9..44d3d1b 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesDeleteTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesDeleteTest.java
@@ -58,7 +58,7 @@
harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories,
numKeys, rtreePolicyType, harness.getMergePolicy(), harness.getOperationTrackerFactory(),
- harness.getIOScheduler(), harness.getIOOperationCallbackProvider());
+ harness.getIOScheduler(), harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
index 3a2537c..8b12224 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesExamplesTest.java
@@ -43,7 +43,7 @@
btreeCmpFactories, valueProviderFactories, rtreePolicyType, harness.getMergePolicy(),
harness.getOperationTrackerFactory(), harness.getIOScheduler(),
harness.getIOOperationCallbackProvider(),
- LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length));
+ LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length), harness.getIODeviceId());
}
@Before
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesInsertTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesInsertTest.java
index 61d5ce7..96ef868 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesInsertTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesInsertTest.java
@@ -58,7 +58,7 @@
harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories,
numKeys, rtreePolicyType, harness.getMergePolicy(), harness.getOperationTrackerFactory(),
- harness.getIOScheduler(), harness.getIOOperationCallbackProvider());
+ harness.getIOScheduler(), harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesLifecycleTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesLifecycleTest.java
index aee8670..012559d 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesLifecycleTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesLifecycleTest.java
@@ -56,7 +56,7 @@
harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories,
numKeys, RTreePolicyType.RTREE, harness.getMergePolicy(), harness.getOperationTrackerFactory(),
- harness.getIOScheduler(), harness.getIOOperationCallbackProvider());
+ harness.getIOScheduler(), harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
index = testCtx.getIndex();
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMergeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMergeTest.java
index d5fecbf..1c67cd0 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMergeTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMergeTest.java
@@ -57,7 +57,7 @@
harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories,
numKeys, rtreePolicyType, harness.getMergePolicy(), harness.getOperationTrackerFactory(),
- harness.getIOScheduler(), harness.getIOOperationCallbackProvider());
+ harness.getIOScheduler(), harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMultiBulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMultiBulkLoadTest.java
index de5f065..fe61afd 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMultiBulkLoadTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesMultiBulkLoadTest.java
@@ -58,7 +58,7 @@
harness.getMemFreePageManager(), harness.getIOManager(), harness.getFileReference(),
harness.getDiskBufferCache(), harness.getDiskFileMapProvider(), fieldSerdes, valueProviderFactories,
numKeys, rtreePolicyType, harness.getMergePolicy(), harness.getOperationTrackerFactory(),
- harness.getIOScheduler(), harness.getIOOperationCallbackProvider());
+ harness.getIOScheduler(), harness.getIOOperationCallbackProvider(), harness.getIODeviceId());
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
index 37be58d..1d2271c 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
@@ -60,10 +60,10 @@
return LSMRTreeUtils.createLSMTree(harness.getMemBufferCache(), harness.getMemFreePageManager(),
harness.getIOManager(), harness.getFileReference(), harness.getDiskBufferCache(),
harness.getDiskFileMapProvider(), typeTraits, rtreeCmpFactories, btreeCmpFactories,
- valueProviderFactories, rtreePolicyType, harness.getMergePolicy(),
- harness.getOperationTrackerFactory(), harness.getIOScheduler(),
+ valueProviderFactories, rtreePolicyType, harness.getBloomFilterFalsePositiveRate(),
+ harness.getMergePolicy(), harness.getOperationTrackerFactory(), harness.getIOScheduler(),
harness.getIOOperationCallbackProvider(),
- LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length));
+ LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length), harness.getIODeviceId());
}
@Override
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
index af73676..aa5023d 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeWithAntiMatterTuplesMultiThreadTest.java
@@ -63,7 +63,7 @@
btreeCmpFactories, valueProviderFactories, rtreePolicyType, harness.getMergePolicy(),
harness.getOperationTrackerFactory(), harness.getIOScheduler(),
harness.getIOOperationCallbackProvider(),
- LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length));
+ LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length), harness.getIODeviceId());
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestContext.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestContext.java
index 27fb9c8..fb9fb23 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestContext.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestContext.java
@@ -71,9 +71,9 @@
IInMemoryFreePageManager memFreePageManager, IOManager ioManager, FileReference file,
IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ISerializerDeserializer[] fieldSerdes,
IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeyFields, RTreePolicyType rtreePolicyType,
- ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
- ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
- throws Exception {
+ double bloomFilterFalsePositiveRate, ILSMMergePolicy mergePolicy,
+ ILSMOperationTrackerFactory opTrackerFactory, ILSMIOOperationScheduler ioScheduler,
+ ILSMIOOperationCallbackProvider ioOpCallbackProvider, int ioDeviceId) throws Exception {
ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
IBinaryComparatorFactory[] rtreeCmpFactories = SerdeUtils
.serdesToComparatorFactories(fieldSerdes, numKeyFields);
@@ -81,8 +81,9 @@
fieldSerdes.length);
LSMRTree lsmTree = LSMRTreeUtils.createLSMTree(memBufferCache, memFreePageManager, ioManager, file,
diskBufferCache, diskFileMapProvider, typeTraits, rtreeCmpFactories, btreeCmpFactories,
- valueProviderFactories, rtreePolicyType, mergePolicy, opTrackerFactory, ioScheduler,
- ioOpCallbackProvider, LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length));
+ valueProviderFactories, rtreePolicyType, bloomFilterFalsePositiveRate, mergePolicy, opTrackerFactory,
+ ioScheduler, ioOpCallbackProvider,
+ LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length), ioDeviceId);
LSMRTreeTestContext testCtx = new LSMRTreeTestContext(fieldSerdes, lsmTree);
return testCtx;
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
index 6fb6f9c..4c2e83b 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeTestHarness.java
@@ -59,8 +59,10 @@
protected final int memPageSize;
protected final int memNumPages;
protected final int hyracksFrameSize;
+ protected final double bloomFilterFalsePositiveRate;
protected IOManager ioManager;
+ protected int ioDeviceId;
protected IBufferCache diskBufferCache;
protected IFileMapProvider diskFileMapProvider;
protected IInMemoryBufferCache memBufferCache;
@@ -83,6 +85,7 @@
this.diskMaxOpenFiles = AccessMethodTestsConfig.LSM_RTREE_DISK_MAX_OPEN_FILES;
this.memPageSize = AccessMethodTestsConfig.LSM_RTREE_MEM_PAGE_SIZE;
this.memNumPages = AccessMethodTestsConfig.LSM_RTREE_MEM_NUM_PAGES;
+ this.bloomFilterFalsePositiveRate = AccessMethodTestsConfig.LSM_RTREE_BLOOMFILTER_FALSE_POSITIVE_RATE;
this.hyracksFrameSize = AccessMethodTestsConfig.LSM_RTREE_HYRACKS_FRAME_SIZE;
this.ioScheduler = SynchronousScheduler.INSTANCE;
this.mergePolicy = NoMergePolicy.INSTANCE;
@@ -91,12 +94,13 @@
}
public LSMRTreeTestHarness(int diskPageSize, int diskNumPages, int diskMaxOpenFiles, int memPageSize,
- int memNumPages, int hyracksFrameSize) {
+ int memNumPages, int hyracksFrameSize, double bloomFilterFalsePositiveRate) {
this.diskPageSize = diskPageSize;
this.diskNumPages = diskNumPages;
this.diskMaxOpenFiles = diskMaxOpenFiles;
this.memPageSize = memPageSize;
this.memNumPages = memNumPages;
+ this.bloomFilterFalsePositiveRate = bloomFilterFalsePositiveRate;
this.hyracksFrameSize = hyracksFrameSize;
this.ioScheduler = SynchronousScheduler.INSTANCE;
this.mergePolicy = NoMergePolicy.INSTANCE;
@@ -113,6 +117,7 @@
memBufferCache = new DualIndexInMemoryBufferCache(new HeapBufferAllocator(), memPageSize, memNumPages);
memFreePageManager = new DualIndexInMemoryFreePageManager(memNumPages, new LIFOMetaDataFrameFactory());
ioManager = TestStorageManagerComponentHolder.getIOManager();
+ ioDeviceId = 0;
rnd.setSeed(RANDOM_SEED);
}
@@ -164,6 +169,10 @@
return ioManager;
}
+ public int getIODeviceId() {
+ return ioDeviceId;
+ }
+
public IBufferCache getDiskBufferCache() {
return diskBufferCache;
}
@@ -176,6 +185,10 @@
return memBufferCache;
}
+ public double getBloomFilterFalsePositiveRate() {
+ return bloomFilterFalsePositiveRate;
+ }
+
public IInMemoryFreePageManager getMemFreePageManager() {
return memFreePageManager;
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeWithAntiMatterTuplesTestContext.java b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeWithAntiMatterTuplesTestContext.java
index 5860236..48cc481 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeWithAntiMatterTuplesTestContext.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/util/LSMRTreeWithAntiMatterTuplesTestContext.java
@@ -72,7 +72,7 @@
IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ISerializerDeserializer[] fieldSerdes,
IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeyFields, RTreePolicyType rtreePolicyType,
ILSMMergePolicy mergePolicy, ILSMOperationTrackerFactory opTrackerFactory,
- ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider)
+ ILSMIOOperationScheduler ioScheduler, ILSMIOOperationCallbackProvider ioOpCallbackProvider, int ioDeviceId)
throws Exception {
ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
IBinaryComparatorFactory[] rtreeCmpFactories = SerdeUtils
@@ -83,7 +83,7 @@
memFreePageManager, ioManager, file, diskBufferCache, diskFileMapProvider, typeTraits,
rtreeCmpFactories, btreeCmpFactories, valueProviderFactories, rtreePolicyType, mergePolicy,
opTrackerFactory, ioScheduler, ioOpCallbackProvider,
- LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length));
+ LSMRTreeUtils.proposeBestLinearizer(typeTraits, rtreeCmpFactories.length), ioDeviceId);
LSMRTreeWithAntiMatterTuplesTestContext testCtx = new LSMRTreeWithAntiMatterTuplesTestContext(fieldSerdes,
lsmTree);
return testCtx;
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
index b9b0cc3..14adffb 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-tests</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -34,14 +34,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-rtree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-test-support</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>test</scope>
</dependency>
diff --git a/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
index 7abf086..7f69d25 100644
--- a/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
@@ -6,7 +6,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-tests</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -34,13 +34,13 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-test-support</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/hyracks/hyracks-tests/pom.xml b/hyracks/hyracks-tests/pom.xml
index 38ddadc..d61209e8 100644
--- a/hyracks/hyracks-tests/pom.xml
+++ b/hyracks/hyracks-tests/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<modules>
diff --git a/hyracks/pom.xml b/hyracks/pom.xml
index 7972b10..4eccb73 100644
--- a/hyracks/pom.xml
+++ b/hyracks/pom.xml
@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<packaging>pom</packaging>
<name>hyracks</name>
diff --git a/pom.xml b/pom.xml
index b68f419..3a7d87b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>fullstack</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<packaging>pom</packaging>
<name>hyracks-ecosystem-full-stack</name>
@@ -26,9 +26,9 @@
</build>
<scm>
- <connection>scm:svn:https://hyracks.googlecode.com/svn/trunk/fullstack</connection>
- <developerConnection>scm:svn:https://hyracks.googlecode.com/svn/trunk/fullstack</developerConnection>
- <url>http://code.google.com/p/hyracks/source/browse/#svn/trunk/fullstack</url>
+ <connection>scm:git:https://code.google.com/p/hyracks/</connection>
+ <developerConnection>scm:git:https://code.google.com/p/hyracks/</developerConnection>
+ <url>https://code.google.com/p/hyracks/source/browse/</url>
</scm>
<distributionManagement>
diff --git a/pregelix/pom.xml b/pregelix/pom.xml
index 46fe7e6..bb2b336 100644
--- a/pregelix/pom.xml
+++ b/pregelix/pom.xml
@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<packaging>pom</packaging>
<name>pregelix</name>
diff --git a/pregelix/pregelix-api/pom.xml b/pregelix/pregelix-api/pom.xml
index d8a49e0..c997ec8 100644
--- a/pregelix/pregelix-api/pom.xml
+++ b/pregelix/pregelix-api/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<properties>
@@ -68,7 +68,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
@@ -81,7 +81,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-hdfs-core</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/NormalizedKeyComputer.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/NormalizedKeyComputer.java
new file mode 100644
index 0000000..6fd0fe5
--- /dev/null
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/graph/NormalizedKeyComputer.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.api.graph;
+
+/**
+ * Users can implement this interface to speed up performance, e.g., via the AlphaSort optimization for cache locality.
+ * The normalized key is an unsigned integer (carried in a signed int) obtained from the binary representation
+ * of the corresponding vertex id.
+ * Usually the normalized key is taken from the prefix bytes of the serialized vertex id.
+ *
+ * @author yingyib
+ */
+public interface NormalizedKeyComputer {
+
+ /**
+ * Get the normalized key from the byte region that holds a serialized vertex id.
+ *
+ * @param data the byte array containing the serialized vertex id
+ * @param start the start offset of the vertex id within the array
+ * @param len the length in bytes of the vertex id
+ *
+ * @return the normalized key.
+ */
+ public int getNormalizedKey(byte[] data, int start, int len);
+}
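
A minimal sketch of an implementation, assuming a vertex id whose serialized form starts with a non-negative big-endian 4-byte int and that the framework compares the returned key as an unsigned integer; neither the class name nor that id layout comes from this patch:

    // Hypothetical example; assumes len >= 4 and a non-negative,
    // big-endian int prefix, so packing the first four bytes preserves id order.
    public class IntPrefixNormalizedKeyComputer implements NormalizedKeyComputer {
        @Override
        public int getNormalizedKey(byte[] data, int start, int len) {
            return ((data[start] & 0xff) << 24) | ((data[start + 1] & 0xff) << 16)
                    | ((data[start + 2] & 0xff) << 8) | (data[start + 3] & 0xff);
        }
    }
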
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/job/PregelixJob.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/job/PregelixJob.java
index 8b6d1b6..dde1a5e 100644
--- a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/job/PregelixJob.java
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/job/PregelixJob.java
@@ -22,6 +22,7 @@
import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
import edu.uci.ics.pregelix.api.graph.MessageCombiner;
+import edu.uci.ics.pregelix.api.graph.NormalizedKeyComputer;
import edu.uci.ics.pregelix.api.graph.Vertex;
import edu.uci.ics.pregelix.api.io.VertexInputFormat;
import edu.uci.ics.pregelix.api.io.VertexOutputFormat;
@@ -56,6 +57,8 @@
public static final String PARTIAL_AGGREGATE_VALUE_CLASS = "pregelix.partialAggregateValueClass";
/** Final aggregate value class */
public static final String FINAL_AGGREGATE_VALUE_CLASS = "pregelix.finalAggregateValueClass";
+ /** The normalized key computer class */
+ public static final String NMK_COMPUTER_CLASS = "pregelix.nmkComputerClass";
/** num of vertices */
public static final String NUM_VERTICE = "pregelix.numVertices";
/** num of edges */
@@ -166,4 +169,13 @@
final public void setFrameSize(int frameSize) {
getConfiguration().setInt(FRAME_SIZE, frameSize);
}
+
+ /**
+ * Set the normalized key computer class
+ *
+ * @param nkcClass the user-defined normalized key computer class
+ */
+ final public void setNoramlizedKeyComputerClass(Class<?> nkcClass) {
+ getConfiguration().setClass(NMK_COMPUTER_CLASS, nkcClass, NormalizedKeyComputer.class);
+ }
}
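
Registering the computer on a job is then a one-liner (setter name as spelled in the patch; the String-arg constructor and the IntPrefixNormalizedKeyComputer from the sketch above are assumptions):

    PregelixJob job = new PregelixJob("PageRank");
    job.setNoramlizedKeyComputerClass(IntPrefixNormalizedKeyComputer.class);
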
diff --git a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/BspUtils.java b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/BspUtils.java
index ff9724d..6dfb416 100644
--- a/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/BspUtils.java
+++ b/pregelix/pregelix-api/src/main/java/edu/uci/ics/pregelix/api/util/BspUtils.java
@@ -23,6 +23,7 @@
import edu.uci.ics.pregelix.api.graph.GlobalAggregator;
import edu.uci.ics.pregelix.api.graph.MessageCombiner;
import edu.uci.ics.pregelix.api.graph.MsgList;
+import edu.uci.ics.pregelix.api.graph.NormalizedKeyComputer;
import edu.uci.ics.pregelix.api.graph.Vertex;
import edu.uci.ics.pregelix.api.io.VertexInputFormat;
import edu.uci.ics.pregelix.api.io.VertexOutputFormat;
@@ -143,6 +144,18 @@
}
/**
+ * Create a user-defined normalized key computer class
+ *
+ * @param conf
+ * Configuration to check
+ * @return Instantiated user-defined normalized key computer
+ */
+ public static NormalizedKeyComputer createNormalizedKeyComputer(Configuration conf) {
+ Class<? extends NormalizedKeyComputer> nmkClass = getNormalizedKeyComputerClass(conf);
+ return ReflectionUtils.newInstance(nmkClass, conf);
+ }
+
+ /**
* Create a global aggregator class
*
* @param conf
@@ -320,7 +333,22 @@
}
/**
- * Get the user's subclassed global aggregator's global value class.
+ * Get the user's subclassed normalized key computer class.
+ *
+ * @param conf
+ * Configuration to check
+ * @return User's normalized key computer class
+ */
+ @SuppressWarnings("unchecked")
+ public static Class<? extends NormalizedKeyComputer> getNormalizedKeyComputerClass(Configuration conf) {
+ if (conf == null)
+ conf = defaultConf;
+ return (Class<? extends NormalizedKeyComputer>) conf.getClass(PregelixJob.NMK_COMPUTER_CLASS,
+ NormalizedKeyComputer.class);
+ }
+
+ /**
+ * Get the user's subclassed global aggregator's global value class.
*
* @param conf
* Configuration to check
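
At runtime the helpers above recover and instantiate the configured class reflectively. A condensed sketch of that lookup (idBytes stands in for a serialized vertex id); note that when nothing is configured, getClass falls back to the NormalizedKeyComputer interface itself, so callers presumably instantiate only when a concrete class was set:

    Configuration conf = job.getConfiguration();
    NormalizedKeyComputer nmk = BspUtils.createNormalizedKeyComputer(conf);
    int key = nmk.getNormalizedKey(idBytes, 0, idBytes.length);
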
diff --git a/pregelix/pregelix-core/pom.xml b/pregelix/pregelix-core/pom.xml
index 39fc396..d229dc7 100644
--- a/pregelix/pregelix-core/pom.xml
+++ b/pregelix/pregelix-core/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
@@ -195,84 +195,84 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-dataflow</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-runtime</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
@@ -286,7 +286,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks.examples</groupId>
<artifactId>hyracks-integration-tests</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -306,7 +306,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-ipc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/base/INormalizedKeyComputerFactoryProvider.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/base/INormalizedKeyComputerFactoryProvider.java
deleted file mode 100644
index b6c995a..0000000
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/base/INormalizedKeyComputerFactoryProvider.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.pregelix.core.base;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-
-public interface INormalizedKeyComputerFactoryProvider {
-
- @SuppressWarnings("rawtypes")
- INormalizedKeyComputerFactory getAscINormalizedKeyComputerFactory(Class keyClass);
-
- @SuppressWarnings("rawtypes")
- INormalizedKeyComputerFactory getDescINormalizedKeyComputerFactory(Class keyClass);
-}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/driver/Driver.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/driver/Driver.java
index 72256f9..1f071bf 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/driver/Driver.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/driver/Driver.java
@@ -22,9 +22,6 @@
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.UUID;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -33,6 +30,7 @@
import edu.uci.ics.hyracks.api.client.HyracksConnection;
import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.deployment.DeploymentId;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.api.job.JobFlag;
import edu.uci.ics.hyracks.api.job.JobId;
@@ -45,7 +43,6 @@
import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoinSingleSort;
import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoinSort;
import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
-import edu.uci.ics.pregelix.core.util.Utilities;
import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
@SuppressWarnings("rawtypes")
@@ -54,9 +51,7 @@
private JobGen jobGen;
private boolean profiling;
- private String applicationName;
private IHyracksClientConnection hcc;
-
private Class exampleClass;
public Driver(Class exampleClass) {
@@ -71,7 +66,6 @@
@Override
public void runJob(PregelixJob job, Plan planChoice, String ipAddress, int port, boolean profiling)
throws HyracksException {
- applicationName = exampleClass.getSimpleName() + UUID.randomUUID();
try {
/** add hadoop configurations */
URL hadoopCore = job.getClass().getClassLoader().getResource("core-site.xml");
@@ -121,13 +115,13 @@
for (URL url : urls)
if (url.toString().endsWith(".jar"))
jars.add(new File(url.getPath()));
- installApplication(jars);
+ DeploymentId deploymentId = installApplication(jars);
start = System.currentTimeMillis();
FileSystem dfs = FileSystem.get(job.getConfiguration());
dfs.delete(FileOutputFormat.getOutputPath(job), true);
- runCreate(jobGen);
- runDataLoad(jobGen);
+ runCreate(deploymentId, jobGen);
+ runDataLoad(deploymentId, jobGen);
end = System.currentTimeMillis();
time = end - start;
LOG.info("data loading finished " + time + "ms");
@@ -135,7 +129,7 @@
boolean terminate = false;
do {
start = System.currentTimeMillis();
- runLoopBodyIteration(jobGen, i);
+ runLoopBodyIteration(deploymentId, jobGen, i);
end = System.currentTimeMillis();
time = end - start;
LOG.info("iteration " + i + " finished " + time + "ms");
@@ -145,90 +139,86 @@
} while (!terminate);
start = System.currentTimeMillis();
- runHDFSWRite(jobGen);
- runCleanup(jobGen);
+ runHDFSWrite(deploymentId, jobGen);
+ runCleanup(deploymentId, jobGen);
end = System.currentTimeMillis();
time = end - start;
LOG.info("result writing finished " + time + "ms");
+ hcc.unDeployBinary(deploymentId);
LOG.info("job finished");
} catch (Exception e) {
throw new HyracksException(e);
}
}
- private void runCreate(JobGen jobGen) throws Exception {
+ private void runCreate(DeploymentId deploymentId, JobGen jobGen) throws Exception {
try {
JobSpecification treeCreateSpec = jobGen.generateCreatingJob();
- execute(treeCreateSpec);
+ execute(deploymentId, treeCreateSpec);
} catch (Exception e) {
throw e;
}
}
- private void runDataLoad(JobGen jobGen) throws Exception {
+ private void runDataLoad(DeploymentId deploymentId, JobGen jobGen) throws Exception {
try {
JobSpecification bulkLoadJobSpec = jobGen.generateLoadingJob();
- execute(bulkLoadJobSpec);
+ execute(deploymentId, bulkLoadJobSpec);
} catch (Exception e) {
throw e;
}
}
- private void runLoopBodyIteration(JobGen jobGen, int iteration) throws Exception {
+ private void runLoopBodyIteration(DeploymentId deploymentId, JobGen jobGen, int iteration) throws Exception {
try {
JobSpecification loopBody = jobGen.generateJob(iteration);
- execute(loopBody);
+ execute(deploymentId, loopBody);
} catch (Exception e) {
throw e;
}
}
- private void runHDFSWRite(JobGen jobGen) throws Exception {
+ private void runHDFSWrite(DeploymentId deploymentId, JobGen jobGen) throws Exception {
try {
JobSpecification scanSortPrintJobSpec = jobGen.scanIndexWriteGraph();
- execute(scanSortPrintJobSpec);
+ execute(deploymentId, scanSortPrintJobSpec);
} catch (Exception e) {
throw e;
}
}
- private void runCleanup(JobGen jobGen) throws Exception {
+ private void runCleanup(DeploymentId deploymentId, JobGen jobGen) throws Exception {
try {
JobSpecification[] cleanups = jobGen.generateCleanup();
- runJobArray(cleanups);
+ runJobArray(deploymentId, cleanups);
} catch (Exception e) {
throw e;
}
}
- private void runJobArray(JobSpecification[] jobs) throws Exception {
+ private void runJobArray(DeploymentId deploymentId, JobSpecification[] jobs) throws Exception {
for (JobSpecification job : jobs) {
- execute(job);
+ execute(deploymentId, job);
}
}
- private void execute(JobSpecification job) throws Exception {
+ private void execute(DeploymentId deploymentId, JobSpecification job) throws Exception {
job.setUseConnectorPolicyForScheduling(false);
- JobId jobId = hcc
- .startJob(job, profiling ? EnumSet.of(JobFlag.PROFILE_RUNTIME) : EnumSet.noneOf(JobFlag.class));
+ JobId jobId = hcc.startJob(deploymentId, job,
+ profiling ? EnumSet.of(JobFlag.PROFILE_RUNTIME) : EnumSet.noneOf(JobFlag.class));
hcc.waitForCompletion(jobId);
}
- public void installApplication(List<File> jars) throws Exception {
- Set<String> allJars = new TreeSet<String>();
+ public DeploymentId installApplication(List<File> jars) throws Exception {
+ List<String> allJars = new ArrayList<String>();
for (File jar : jars) {
allJars.add(jar.getAbsolutePath());
}
long start = System.currentTimeMillis();
- File appZip = Utilities.getHyracksArchive(applicationName, allJars);
+ DeploymentId deploymentId = hcc.deployBinary(allJars);
long end = System.currentTimeMillis();
- LOG.info("jar packing finished " + (end - start) + "ms");
-
- start = System.currentTimeMillis();
- // TODO: Fix this step to use Yarn
- //hcc.createApplication(applicationName, appZip);
- end = System.currentTimeMillis();
LOG.info("jar deployment finished " + (end - start) + "ms");
+ return deploymentId;
}
}
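
The Driver refactor above replaces the old zip-and-install path with Hyracks' binary deployment API: the job jars are shipped once, the resulting DeploymentId is threaded through every startJob call, and the binaries are released when the run completes. Condensed to its lifecycle (specs stands in for the create/load/iteration/write/cleanup job specifications generated above):

    DeploymentId deploymentId = hcc.deployBinary(allJars); // ship the jars to the cluster once
    for (JobSpecification spec : specs) {
        JobId jobId = hcc.startJob(deploymentId, spec, EnumSet.noneOf(JobFlag.class));
        hcc.waitForCompletion(jobId);                      // each job runs against the deployed jars
    }
    hcc.unDeployBinary(deploymentId);                      // release the binaries at the end
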
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/config/ConfigurationFactory.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/config/ConfigurationFactory.java
index f3089ba..d225eb4 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/config/ConfigurationFactory.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/hadoop/config/ConfigurationFactory.java
@@ -17,6 +17,7 @@
import org.apache.hadoop.conf.Configuration;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.pregelix.api.util.SerDeUtils;
import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
@@ -34,11 +35,11 @@
}
@Override
- public Configuration createConfiguration() throws HyracksDataException {
+ public Configuration createConfiguration(IHyracksTaskContext ctx) throws HyracksDataException {
try {
Configuration conf = new Configuration();
+ conf.setClassLoader(ctx.getJobletContext().getClassLoader());
SerDeUtils.deserialize(conf, data);
- conf.setClassLoader(this.getClass().getClassLoader());
return conf;
} catch (Exception e) {
throw new HyracksDataException(e);
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGen.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGen.java
index 77fd1a7..c913eff 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGen.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGen.java
@@ -70,10 +70,8 @@
import edu.uci.ics.pregelix.core.data.TypeTraits;
import edu.uci.ics.pregelix.core.hadoop.config.ConfigurationFactory;
import edu.uci.ics.pregelix.core.jobgen.clusterconfig.ClusterConfig;
-import edu.uci.ics.pregelix.core.jobgen.provider.NormalizedKeyComputerFactoryProvider;
import edu.uci.ics.pregelix.core.runtime.touchpoint.WritableComparingBinaryComparatorFactory;
import edu.uci.ics.pregelix.core.util.DataflowUtils;
-import edu.uci.ics.pregelix.core.util.DatatypeHelper;
import edu.uci.ics.pregelix.dataflow.HDFSFileWriteOperatorDescriptor;
import edu.uci.ics.pregelix.dataflow.VertexFileScanOperatorDescriptor;
import edu.uci.ics.pregelix.dataflow.VertexWriteOperatorDescriptor;
@@ -84,6 +82,7 @@
import edu.uci.ics.pregelix.runtime.bootstrap.StorageManagerInterface;
import edu.uci.ics.pregelix.runtime.touchpoint.RuntimeHookFactory;
import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdPartitionComputerFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.WritableSerializerDeserializerFactory;
public abstract class JobGen implements IJobGen {
private static final Logger LOGGER = Logger.getLogger(JobGen.class.getName());
@@ -173,6 +172,7 @@
new BTreeDataflowHelperFactory(), new TransientLocalResourceFactoryProvider(),
NoOpOperationCallbackFactory.INSTANCE);
ClusterConfig.setLocationConstraint(spec, btreeCreate);
+ spec.setFrameSize(frameSize);
return spec;
}
@@ -211,8 +211,7 @@
*/
int[] sortFields = new int[1];
sortFields[0] = 0;
- INormalizedKeyComputerFactory nkmFactory = NormalizedKeyComputerFactoryProvider.INSTANCE
- .getAscINormalizedKeyComputerFactory(vertexIdClass);
+ INormalizedKeyComputerFactory nkmFactory = JobGenUtil.getINormalizedKeyComputerFactory(conf);
IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
.getClass());
@@ -239,9 +238,10 @@
* connect operator descriptors
*/
ITuplePartitionComputerFactory hashPartitionComputerFactory = new VertexIdPartitionComputerFactory(
- DatatypeHelper.createSerializerDeserializer(vertexIdClass));
+ new WritableSerializerDeserializerFactory(vertexIdClass));
spec.connect(new MToNPartitioningConnectorDescriptor(spec, hashPartitionComputerFactory), scanner, 0, sorter, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
+ spec.setFrameSize(frameSize);
return spec;
}
@@ -287,8 +287,7 @@
*/
int[] sortFields = new int[1];
sortFields[0] = 0;
- INormalizedKeyComputerFactory nkmFactory = NormalizedKeyComputerFactoryProvider.INSTANCE
- .getAscINormalizedKeyComputerFactory(vertexIdClass);
+ INormalizedKeyComputerFactory nkmFactory = JobGenUtil.getINormalizedKeyComputerFactory(conf);
IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
comparatorFactories[0] = new WritableComparingBinaryComparatorFactory(WritableComparator.get(vertexIdClass)
.getClass());
@@ -315,10 +314,11 @@
* connect operator descriptors
*/
ITuplePartitionComputerFactory hashPartitionComputerFactory = new VertexIdPartitionComputerFactory(
- DatatypeHelper.createSerializerDeserializer(vertexIdClass));
+ new WritableSerializerDeserializerFactory(vertexIdClass));
spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, sorter, 0);
spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, hashPartitionComputerFactory, sortFields,
comparatorFactories), sorter, 0, writer, 0);
+ spec.setFrameSize(frameSize);
return spec;
}
@@ -382,7 +382,7 @@
int[] sortFields = new int[1];
sortFields[0] = 0;
ITuplePartitionComputerFactory hashPartitionComputerFactory = new VertexIdPartitionComputerFactory(
- DatatypeHelper.createSerializerDeserializer(vertexIdClass));
+ new WritableSerializerDeserializerFactory(vertexIdClass));
spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, scanner, 0);
spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, hashPartitionComputerFactory, sortFields,
comparatorFactories), scanner, 0, writer, 0);
@@ -444,6 +444,7 @@
*/
spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, scanner, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), scanner, 0, writer, 0);
+ spec.setFrameSize(frameSize);
return spec;
}
@@ -462,6 +463,7 @@
ClusterConfig.setLocationConstraint(spec, drop);
spec.addRoot(drop);
+ spec.setFrameSize(frameSize);
return spec;
}
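
Two threads run through the JobGen changes: every generated JobSpecification now pins its frame size explicitly (spec.setFrameSize(frameSize)), and the sort operators obtain their normalized-key factory from the user's configured computer via JobGenUtil.getINormalizedKeyComputerFactory(conf), whose implementation is outside this diff. A hypothetical adapter of roughly that shape:

    // Hypothetical bridge, not the real JobGenUtil helper: adapts the
    // Pregelix NormalizedKeyComputer to the Hyracks factory interface
    // consumed by the sort operators.
    public class UserNmkComputerFactory implements INormalizedKeyComputerFactory {
        private static final long serialVersionUID = 1L;
        private final Class<? extends NormalizedKeyComputer> nmkClass;

        public UserNmkComputerFactory(Class<? extends NormalizedKeyComputer> nmkClass) {
            this.nmkClass = nmkClass;
        }

        @Override
        public INormalizedKeyComputer createNormalizedKeyComputer() {
            final NormalizedKeyComputer user = ReflectionUtils.newInstance(nmkClass, null);
            return new INormalizedKeyComputer() {
                @Override
                public int normalize(byte[] bytes, int start, int length) {
                    return user.getNormalizedKey(bytes, start, length);
                }
            };
        }
    }
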
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenInnerJoin.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenInnerJoin.java
index fe2fcac..9d23ca8 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenInnerJoin.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenInnerJoin.java
@@ -73,6 +73,7 @@
import edu.uci.ics.pregelix.runtime.touchpoint.PreSuperStepRuntimeHookFactory;
import edu.uci.ics.pregelix.runtime.touchpoint.RuntimeHookFactory;
import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdPartitionComputerFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.WritableSerializerDeserializerFactory;
public class JobGenInnerJoin extends JobGen {
@@ -174,8 +175,7 @@
* construct local sort operator
*/
int[] keyFields = new int[] { 0 };
- INormalizedKeyComputerFactory nkmFactory = JobGenUtil
- .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+ INormalizedKeyComputerFactory nkmFactory = JobGenUtil.getINormalizedKeyComputerFactory(conf);
IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
.getClass());
@@ -230,10 +230,11 @@
/**
* add the delete operator to delete vertexes
*/
+ int[] fieldPermutationDelete = new int[] { 0 };
TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, rdDelete, storageManagerInterface, lcManagerProvider, fileSplitProvider, typeTraits,
- comparatorFactories, null, fieldPermutation, IndexOperation.DELETE, new BTreeDataflowHelperFactory(),
- null, NoOpOperationCallbackFactory.INSTANCE);
+ comparatorFactories, null, fieldPermutationDelete, IndexOperation.DELETE,
+ new BTreeDataflowHelperFactory(), null, NoOpOperationCallbackFactory.INSTANCE);
ClusterConfig.setLocationConstraint(spec, deleteOp);
/** construct empty sink operator */
@@ -245,7 +246,7 @@
ClusterConfig.setLocationConstraint(spec, emptySink4);
ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
- rdUnnestedMessage.getFields()[0]);
+ new WritableSerializerDeserializerFactory(vertexIdClass));
ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
/** connect all operators **/
@@ -384,8 +385,7 @@
/**
* construct local sort operator
*/
- INormalizedKeyComputerFactory nkmFactory = JobGenUtil
- .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+ INormalizedKeyComputerFactory nkmFactory = JobGenUtil.getINormalizedKeyComputerFactory(conf);
IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
.getClass());
@@ -454,10 +454,11 @@
/**
* add the delete operator to delete vertexes
*/
+ int[] fieldPermutationDelete = new int[] { 0 };
TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, rdDelete, storageManagerInterface, lcManagerProvider, fileSplitProvider, typeTraits,
- comparatorFactories, null, fieldPermutation, IndexOperation.DELETE, new BTreeDataflowHelperFactory(),
- null, NoOpOperationCallbackFactory.INSTANCE);
+ comparatorFactories, null, fieldPermutationDelete, IndexOperation.DELETE,
+ new BTreeDataflowHelperFactory(), null, NoOpOperationCallbackFactory.INSTANCE);
ClusterConfig.setLocationConstraint(spec, deleteOp);
/** construct empty sink operator */
@@ -470,7 +471,7 @@
ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
- rdUnnestedMessage.getFields()[0]);
+ new WritableSerializerDeserializerFactory(vertexIdClass));
/** connect all operators **/
spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, materializeRead, 0);
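Across the JobGen classes, VertexIdPartitionComputerFactory is now built from a WritableSerializerDeserializerFactory(vertexIdClass) rather than a pre-built serializer or a field descriptor. Passing a factory defers serializer creation to the task site, where the job's class loader can resolve vertexIdClass. Restated as a standalone sketch (raw types as in the surrounding code):

    import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
    import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdPartitionComputerFactory;
    import edu.uci.ics.pregelix.runtime.touchpoint.WritableSerializerDeserializerFactory;

    public class PartitionerSketch {
        @SuppressWarnings("rawtypes")
        public static ITuplePartitionComputerFactory forVertexId(Class vertexIdClass) {
            // The serde is created lazily on the task from this factory.
            return new VertexIdPartitionComputerFactory(
                    new WritableSerializerDeserializerFactory(vertexIdClass));
        }
    }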
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoin.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoin.java
index f1eceb7..250d245 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoin.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoin.java
@@ -73,6 +73,7 @@
import edu.uci.ics.pregelix.runtime.touchpoint.RuntimeHookFactory;
import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdNullWriterFactory;
import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdPartitionComputerFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.WritableSerializerDeserializerFactory;
public class JobGenOuterJoin extends JobGen {
@@ -140,8 +141,7 @@
* construct local sort operator
*/
int[] keyFields = new int[] { 0 };
- INormalizedKeyComputerFactory nkmFactory = JobGenUtil
- .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+ INormalizedKeyComputerFactory nkmFactory = JobGenUtil.getINormalizedKeyComputerFactory(conf);
IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
.getClass());
@@ -213,22 +213,22 @@
/**
* add the delete operator to delete vertexes
*/
+ int[] fieldPermutationDelete = new int[] { 0 };
TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, rdDelete, storageManagerInterface, lcManagerProvider, fileSplitProvider, typeTraits,
- comparatorFactories, null, fieldPermutation, IndexOperation.DELETE, new BTreeDataflowHelperFactory(),
- null, NoOpOperationCallbackFactory.INSTANCE);
+ comparatorFactories, null, fieldPermutationDelete, IndexOperation.DELETE,
+ new BTreeDataflowHelperFactory(), null, NoOpOperationCallbackFactory.INSTANCE);
ClusterConfig.setLocationConstraint(spec, deleteOp);
/** construct empty sink operator */
EmptySinkOperatorDescriptor emptySink3 = new EmptySinkOperatorDescriptor(spec);
ClusterConfig.setLocationConstraint(spec, emptySink3);
/** construct empty sink operator */
EmptySinkOperatorDescriptor emptySink4 = new EmptySinkOperatorDescriptor(spec);
ClusterConfig.setLocationConstraint(spec, emptySink4);
ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
- rdUnnestedMessage.getFields()[0]);
+ new WritableSerializerDeserializerFactory(vertexIdClass));
/** connect all operators **/
spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, scanner, 0);
@@ -345,8 +345,7 @@
/**
* construct local sort operator
*/
- INormalizedKeyComputerFactory nkmFactory = JobGenUtil
- .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+ INormalizedKeyComputerFactory nkmFactory = JobGenUtil.getINormalizedKeyComputerFactory(conf);
IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
.getClass());
@@ -416,9 +415,10 @@
/**
* add the delete operator to delete vertexes
*/
+ int[] fieldPermutationDelete = new int[] { 0 };
TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, rdDelete, storageManagerInterface, lcManagerProvider, fileSplitProvider, typeTraits,
- comparatorFactories, null, fieldPermutation, IndexOperation.DELETE, new BTreeDataflowHelperFactory(),
+ comparatorFactories, null, fieldPermutationDelete, IndexOperation.DELETE, new BTreeDataflowHelperFactory(),
null, NoOpOperationCallbackFactory.INSTANCE);
ClusterConfig.setLocationConstraint(spec, deleteOp);
@@ -432,7 +432,7 @@
ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
- rdUnnestedMessage.getFields()[0]);
+ new WritableSerializerDeserializerFactory(vertexIdClass));
/** connect all operators **/
spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSingleSort.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSingleSort.java
index 314c393..f796dd7 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSingleSort.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSingleSort.java
@@ -72,6 +72,7 @@
import edu.uci.ics.pregelix.runtime.touchpoint.RuntimeHookFactory;
import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdNullWriterFactory;
import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdPartitionComputerFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.WritableSerializerDeserializerFactory;
public class JobGenOuterJoinSingleSort extends JobGen {
@@ -142,8 +143,7 @@
* construct global sort operator
*/
int[] keyFields = new int[] { 0 };
- INormalizedKeyComputerFactory nkmFactory = JobGenUtil
- .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+ INormalizedKeyComputerFactory nkmFactory = JobGenUtil.getINormalizedKeyComputerFactory(conf);
IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
.getClass());
@@ -205,10 +205,11 @@
/**
* add the delete operator to delete vertexes
*/
+ int[] fieldPermutationDelete = new int[] { 0 };
TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, rdDelete, storageManagerInterface, lcManagerProvider, fileSplitProvider, typeTraits,
- comparatorFactories, null, fieldPermutation, IndexOperation.DELETE, new BTreeDataflowHelperFactory(),
- null, NoOpOperationCallbackFactory.INSTANCE);
+ comparatorFactories, null, fieldPermutationDelete, IndexOperation.DELETE,
+ new BTreeDataflowHelperFactory(), null, NoOpOperationCallbackFactory.INSTANCE);
ClusterConfig.setLocationConstraint(spec, deleteOp);
/** construct empty sink operator */
@@ -221,7 +222,7 @@
ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
- rdUnnestedMessage.getFields()[0]);
+ new WritableSerializerDeserializerFactory(vertexIdClass));
/** connect all operators **/
spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, scanner, 0);
@@ -333,8 +334,7 @@
/**
* construct global sort operator
*/
- INormalizedKeyComputerFactory nkmFactory = JobGenUtil
- .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+ INormalizedKeyComputerFactory nkmFactory = JobGenUtil.getINormalizedKeyComputerFactory(conf);
IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
.getClass());
@@ -392,10 +392,11 @@
/**
* add the delete operator to delete vertexes
*/
+ int[] fieldPermutationDelete = new int[] { 0 };
TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, rdDelete, storageManagerInterface, lcManagerProvider, fileSplitProvider, typeTraits,
- comparatorFactories, null, fieldPermutation, IndexOperation.DELETE, new BTreeDataflowHelperFactory(),
- null, NoOpOperationCallbackFactory.INSTANCE);
+ comparatorFactories, null, fieldPermutationDelete, IndexOperation.DELETE,
+ new BTreeDataflowHelperFactory(), null, NoOpOperationCallbackFactory.INSTANCE);
ClusterConfig.setLocationConstraint(spec, deleteOp);
/** construct empty sink operator */
@@ -408,7 +409,7 @@
ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
- rdUnnestedMessage.getFields()[0]);
+ new WritableSerializerDeserializerFactory(vertexIdClass));
/** connect all operators **/
spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSort.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSort.java
index 0c3db38..cb6c215 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSort.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenOuterJoinSort.java
@@ -72,6 +72,7 @@
import edu.uci.ics.pregelix.runtime.touchpoint.RuntimeHookFactory;
import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdNullWriterFactory;
import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdPartitionComputerFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.WritableSerializerDeserializerFactory;
public class JobGenOuterJoinSort extends JobGen {
@@ -139,8 +140,7 @@
* construct local sort operator
*/
int[] keyFields = new int[] { 0 };
- INormalizedKeyComputerFactory nkmFactory = JobGenUtil
- .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+ INormalizedKeyComputerFactory nkmFactory = JobGenUtil.getINormalizedKeyComputerFactory(conf);
IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
.getClass());
@@ -219,10 +219,11 @@
/**
* add the delete operator to delete vertexes
*/
+ int[] fieldPermutationDelete = new int[] { 0 };
TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, rdDelete, storageManagerInterface, lcManagerProvider, fileSplitProvider, typeTraits,
- comparatorFactories, null, fieldPermutation, IndexOperation.DELETE, new BTreeDataflowHelperFactory(),
- null, NoOpOperationCallbackFactory.INSTANCE);
+ comparatorFactories, null, fieldPermutationDelete, IndexOperation.DELETE,
+ new BTreeDataflowHelperFactory(), null, NoOpOperationCallbackFactory.INSTANCE);
ClusterConfig.setLocationConstraint(spec, deleteOp);
/** construct empty sink operator */
@@ -234,7 +235,7 @@
ClusterConfig.setLocationConstraint(spec, emptySink4);
ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
- rdUnnestedMessage.getFields()[0]);
+ new WritableSerializerDeserializerFactory(vertexIdClass));
/** connect all operators **/
spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), preSuperStep, 0, scanner, 0);
@@ -347,8 +348,7 @@
/**
* construct local sort operator
*/
- INormalizedKeyComputerFactory nkmFactory = JobGenUtil
- .getINormalizedKeyComputerFactory(iteration, vertexIdClass);
+ INormalizedKeyComputerFactory nkmFactory = JobGenUtil.getINormalizedKeyComputerFactory(conf);
IBinaryComparatorFactory[] sortCmpFactories = new IBinaryComparatorFactory[1];
sortCmpFactories[0] = JobGenUtil.getIBinaryComparatorFactory(iteration, WritableComparator.get(vertexIdClass)
.getClass());
@@ -425,10 +425,11 @@
/**
* add the delete operator to delete vertexes
*/
+ int[] fieldPermutationDelete = new int[] { 0 };
TreeIndexInsertUpdateDeleteOperatorDescriptor deleteOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
spec, rdDelete, storageManagerInterface, lcManagerProvider, fileSplitProvider, typeTraits,
- comparatorFactories, null, fieldPermutation, IndexOperation.DELETE, new BTreeDataflowHelperFactory(),
- null, NoOpOperationCallbackFactory.INSTANCE);
+ comparatorFactories, null, fieldPermutationDelete, IndexOperation.DELETE,
+ new BTreeDataflowHelperFactory(), null, NoOpOperationCallbackFactory.INSTANCE);
ClusterConfig.setLocationConstraint(spec, deleteOp);
/** construct empty sink operator */
@@ -441,7 +442,7 @@
ITuplePartitionComputerFactory unifyingPartitionComputerFactory = new MergePartitionComputerFactory();
ITuplePartitionComputerFactory partionFactory = new VertexIdPartitionComputerFactory(
- rdUnnestedMessage.getFields()[0]);
+ new WritableSerializerDeserializerFactory(vertexIdClass));
/** connect all operators **/
spec.connect(new OneToOneConnectorDescriptor(spec), emptyTupleSource, 0, preSuperStep, 0);
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenUtil.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenUtil.java
index 63b7c6d..319d7cc 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenUtil.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/JobGenUtil.java
@@ -15,10 +15,14 @@
package edu.uci.ics.pregelix.core.jobgen;
+import org.apache.hadoop.conf.Configuration;
+
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.pregelix.core.jobgen.provider.NormalizedKeyComputerFactoryProvider;
+import edu.uci.ics.pregelix.api.graph.NormalizedKeyComputer;
+import edu.uci.ics.pregelix.api.util.BspUtils;
import edu.uci.ics.pregelix.core.runtime.touchpoint.WritableComparingBinaryComparatorFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdNormalizedKeyComputerFactory;
@SuppressWarnings({ "rawtypes", "unchecked" })
public class JobGenUtil {
@@ -31,8 +35,12 @@
* @param keyClass
* @return
*/
- public static INormalizedKeyComputerFactory getINormalizedKeyComputerFactory(int iteration, Class keyClass) {
- return NormalizedKeyComputerFactoryProvider.INSTANCE.getAscINormalizedKeyComputerFactory(keyClass);
+ public static INormalizedKeyComputerFactory getINormalizedKeyComputerFactory(Configuration conf) {
+ Class<? extends NormalizedKeyComputer> clazz = BspUtils.getNormalizedKeyComputerClass(conf);
+ if (clazz.equals(NormalizedKeyComputer.class)) {
+ return null;
+ }
+ return new VertexIdNormalizedKeyComputerFactory(clazz);
}
/**
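The normalized-key computer is now resolved from the job Configuration: BspUtils.getNormalizedKeyComputerClass(conf) returns the registered NormalizedKeyComputer subclass, with the base class itself acting as the "none registered" sentinel. A null factory tells the sorters to skip the normalized-key fast path and order purely through the binary comparators. A hedged restatement of the contract:

    import org.apache.hadoop.conf.Configuration;

    import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
    import edu.uci.ics.pregelix.core.jobgen.JobGenUtil;

    public class NkmLookupSketch {
        // Illustrative only; the hunk above is the authoritative lookup.
        public static boolean usesNormalizedKeys(Configuration conf) {
            INormalizedKeyComputerFactory f = JobGenUtil.getINormalizedKeyComputerFactory(conf);
            return f != null; // null => sort falls back to comparator-only ordering
        }
    }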
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/provider/NormalizedKeyComputerFactoryProvider.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/provider/NormalizedKeyComputerFactoryProvider.java
deleted file mode 100644
index 0735593..0000000
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/jobgen/provider/NormalizedKeyComputerFactoryProvider.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.pregelix.core.jobgen.provider;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.pregelix.core.base.INormalizedKeyComputerFactoryProvider;
-import edu.uci.ics.pregelix.runtime.touchpoint.VLongAscNormalizedKeyComputerFactory;
-import edu.uci.ics.pregelix.runtime.touchpoint.VLongDescNormalizedKeyComputerFactory;
-
-public class NormalizedKeyComputerFactoryProvider implements INormalizedKeyComputerFactoryProvider {
-
- public static INormalizedKeyComputerFactoryProvider INSTANCE = new NormalizedKeyComputerFactoryProvider();
-
- private NormalizedKeyComputerFactoryProvider() {
-
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public INormalizedKeyComputerFactory getAscINormalizedKeyComputerFactory(Class keyClass) {
- if (keyClass.getName().indexOf("VLongWritable") > 0)
- return new VLongAscNormalizedKeyComputerFactory();
- else
- return null;
- }
-
- @SuppressWarnings("rawtypes")
- @Override
- public INormalizedKeyComputerFactory getDescINormalizedKeyComputerFactory(Class keyClass) {
- if (keyClass.getName().indexOf("VLongWritable") > 0)
- return new VLongDescNormalizedKeyComputerFactory();
- else
- return null;
- }
-}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/runtime/touchpoint/WritableRecordDescriptorFactory.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/runtime/touchpoint/WritableRecordDescriptorFactory.java
index d1d927d..145169e 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/runtime/touchpoint/WritableRecordDescriptorFactory.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/runtime/touchpoint/WritableRecordDescriptorFactory.java
@@ -14,6 +14,7 @@
*/
package edu.uci.ics.pregelix.core.runtime.touchpoint;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
@@ -29,9 +30,9 @@
}
@Override
- public RecordDescriptor createRecordDescriptor() throws HyracksDataException {
+ public RecordDescriptor createRecordDescriptor(IHyracksTaskContext ctx) throws HyracksDataException {
try {
- return DataflowUtils.getRecordDescriptorFromWritableClasses(fieldClasses);
+ return DataflowUtils.getRecordDescriptorFromWritableClasses(ctx, fieldClasses);
} catch (HyracksException e) {
throw new HyracksDataException(e);
}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataflowUtils.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataflowUtils.java
index bcf3ffc..e9132c4 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataflowUtils.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DataflowUtils.java
@@ -17,6 +17,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
@@ -27,6 +28,7 @@
import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
import edu.uci.ics.pregelix.runtime.simpleagg.AccumulatingAggregatorFactory;
import edu.uci.ics.pregelix.runtime.simpleagg.AggregationFunctionFactory;
+import edu.uci.ics.pregelix.runtime.touchpoint.DatatypeHelper;
public class DataflowUtils {
@@ -40,9 +42,10 @@
throws HyracksException {
RecordDescriptor recordDescriptor = null;
try {
+ ClassLoader loader = DataflowUtils.class.getClassLoader();
recordDescriptor = DatatypeHelper.createKeyValueRecordDescriptor(
- (Class<? extends Writable>) Class.forName(className1),
- (Class<? extends Writable>) Class.forName(className2));
+ (Class<? extends Writable>) loader.loadClass(className1),
+ (Class<? extends Writable>) loader.loadClass(className2));
} catch (ClassNotFoundException cnfe) {
throw new HyracksException(cnfe);
}
@@ -53,11 +56,12 @@
public static RecordDescriptor getRecordDescriptorFromWritableClasses(String... classNames) throws HyracksException {
RecordDescriptor recordDescriptor = null;
ISerializerDeserializer[] serdes = new ISerializerDeserializer[classNames.length];
+ ClassLoader loader = DataflowUtils.class.getClassLoader();
try {
int i = 0;
for (String className : classNames)
- serdes[i++] = DatatypeHelper.createSerializerDeserializer((Class<? extends Writable>) Class
- .forName(className));
+ serdes[i++] = DatatypeHelper.createSerializerDeserializer((Class<? extends Writable>) loader
+ .loadClass(className));
} catch (ClassNotFoundException cnfe) {
throw new HyracksException(cnfe);
}
@@ -79,4 +83,35 @@
new IAggregateFunctionFactory[] { aggFuncFactory });
return aggregatorFactory;
}
+
+ @SuppressWarnings("unchecked")
+ public static RecordDescriptor getRecordDescriptorFromKeyValueClasses(IHyracksTaskContext ctx, String className1,
+ String className2) throws HyracksException {
+ RecordDescriptor recordDescriptor = null;
+ try {
+ recordDescriptor = DatatypeHelper.createKeyValueRecordDescriptor((Class<? extends Writable>) ctx
+ .getJobletContext().loadClass(className1), (Class<? extends Writable>) ctx.getJobletContext()
+ .loadClass(className2));
+ } catch (Exception cnfe) {
+ throw new HyracksException(cnfe);
+ }
+ return recordDescriptor;
+ }
+
+ @SuppressWarnings({ "unchecked", "rawtypes" })
+ public static RecordDescriptor getRecordDescriptorFromWritableClasses(IHyracksTaskContext ctx, String... classNames)
+ throws HyracksException {
+ RecordDescriptor recordDescriptor = null;
+ ISerializerDeserializer[] serdes = new ISerializerDeserializer[classNames.length];
+ try {
+ int i = 0;
+ for (String className : classNames)
+ serdes[i++] = DatatypeHelper.createSerializerDeserializer((Class<? extends Writable>) ctx
+ .getJobletContext().loadClass(className));
+ } catch (Exception cnfe) {
+ throw new HyracksException(cnfe);
+ }
+ recordDescriptor = new RecordDescriptor(serdes);
+ return recordDescriptor;
+ }
}
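The two new overloads mirror the existing ones but load classes through ctx.getJobletContext().loadClass(...), so record descriptors built inside a running task resolve against the job's class loader rather than the utility's own. A hedged usage sketch from operator code (the Writable class names are illustrative):

    import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
    import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
    import edu.uci.ics.hyracks.api.exceptions.HyracksException;
    import edu.uci.ics.pregelix.core.util.DataflowUtils;

    public class TaskSideRdSketch {
        public static RecordDescriptor vertexRd(IHyracksTaskContext ctx) throws HyracksException {
            // Both classes are loaded via the joblet context, i.e. the job's class loader.
            return DataflowUtils.getRecordDescriptorFromKeyValueClasses(ctx,
                    "org.apache.hadoop.io.VLongWritable",    // illustrative key class
                    "org.apache.hadoop.io.DoubleWritable");  // illustrative value class
        }
    }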
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/PregelixHyracksIntegrationUtil.java b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/PregelixHyracksIntegrationUtil.java
index d099645..8410e1e 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/PregelixHyracksIntegrationUtil.java
+++ b/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/PregelixHyracksIntegrationUtil.java
@@ -52,7 +52,7 @@
ccConfig.clusterNetPort = TEST_HYRACKS_CC_PORT;
ccConfig.clientNetPort = TEST_HYRACKS_CC_CLIENT_PORT;
ccConfig.defaultMaxJobAttempts = 0;
- ccConfig.jobHistorySize = 0;
+ ccConfig.jobHistorySize = 1;
ccConfig.profileDumpPeriod = -1;
// cluster controller
diff --git a/pregelix/pregelix-core/src/main/resources/conf/cluster.properties b/pregelix/pregelix-core/src/main/resources/conf/cluster.properties
index 2d2401a..056cce4 100644
--- a/pregelix/pregelix-core/src/main/resources/conf/cluster.properties
+++ b/pregelix/pregelix-core/src/main/resources/conf/cluster.properties
@@ -4,9 +4,6 @@
#The CC port for Hyracks cluster management
CC_CLUSTERPORT=1099
-#The directory of hyracks binaries
-HYRACKS_HOME=../../../../hyracks
-
#The tmp directory for cc to install jars
CCTMP_DIR=/tmp/t1
@@ -29,9 +26,11 @@
FRAME_SIZE=65536
#CC JAVA_OPTS
-CCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+CCJAVA_OPTS="-Xmx1g -Djava.util.logging.config.file=logging.properties"
+# debug option: CCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7001,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
# Yourkit option: -agentpath:/grid/0/dev/vborkar/tools/yjp-10.0.4/bin/linux-x86-64/libyjpagent.so=port=20001"
#NC JAVA_OPTS
-NCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7002,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
-
+NCJAVA_OPTS="-Xmx1g -Djava.util.logging.config.file=logging.properties"
+# debug option: NCJAVA_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,address=7002,server=y,suspend=n -Xmx1g -Djava.util.logging.config.file=logging.properties"
+# Yourkit option: -agentpath:/grid/0/dev/vborkar/tools/yjp-10.0.4/bin/linux-x86-64/libyjpagent.so=port=20001"
diff --git a/pregelix/pregelix-core/src/main/resources/hyracks-deployment.properties b/pregelix/pregelix-core/src/main/resources/hyracks-deployment.properties
deleted file mode 100644
index d5d7cd0..0000000
--- a/pregelix/pregelix-core/src/main/resources/hyracks-deployment.properties
+++ /dev/null
@@ -1 +0,0 @@
-nc.bootstrap.class=edu.uci.ics.pregelix.runtime.bootstrap.NCBootstrapImpl
\ No newline at end of file
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/getip.sh b/pregelix/pregelix-core/src/main/resources/scripts/getip.sh
index a691c0f..1b44d09 100755
--- a/pregelix/pregelix-core/src/main/resources/scripts/getip.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/getip.sh
@@ -1,3 +1,21 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
#get the OS
OS_NAME=`uname -a|awk '{print $1}'`
LINUX_OS='Linux'
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/pregelix b/pregelix/pregelix-core/src/main/resources/scripts/pregelix
index b1a2f74..7232ccc 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/pregelix
+++ b/pregelix/pregelix-core/src/main/resources/scripts/pregelix
@@ -1,22 +1,20 @@
#!/bin/sh
-# ----------------------------------------------------------------------------
-# Copyright 2001-2006 The Apache Software Foundation.
#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ----------------------------------------------------------------------------
-#
-# Copyright (c) 2001-2006 The Apache Software Foundation. All rights
-# reserved.
# resolve links - $0 may be a softlink
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/pregelixcc b/pregelix/pregelix-core/src/main/resources/scripts/pregelixcc
new file mode 100755
index 0000000..c1ee3f2
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/pregelixcc
@@ -0,0 +1,114 @@
+#!/bin/sh
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
+# resolve links - $0 may be a softlink
+PRG="$0"
+
+while [ -h "$PRG" ]; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`/"$link"
+ fi
+done
+
+PRGDIR=`dirname "$PRG"`
+BASEDIR=`cd "$PRGDIR/.." >/dev/null; pwd`
+
+
+
+# OS specific support. $var _must_ be set to either true or false.
+cygwin=false;
+darwin=false;
+case "`uname`" in
+ CYGWIN*) cygwin=true ;;
+ Darwin*) darwin=true
+ if [ -z "$JAVA_VERSION" ] ; then
+ JAVA_VERSION="CurrentJDK"
+ else
+ echo "Using Java version: $JAVA_VERSION"
+ fi
+ if [ -z "$JAVA_HOME" ] ; then
+ JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home
+ fi
+ ;;
+esac
+
+if [ -z "$JAVA_HOME" ] ; then
+ if [ -r /etc/gentoo-release ] ; then
+ JAVA_HOME=`java-config --jre-home`
+ fi
+fi
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched
+if $cygwin ; then
+ [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+ [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
+fi
+
+# If a specific java binary isn't specified search for the standard 'java' binary
+if [ -z "$JAVACMD" ] ; then
+ if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ else
+ JAVACMD=`which java`
+ fi
+fi
+
+if [ ! -x "$JAVACMD" ] ; then
+ echo "Error: JAVA_HOME is not defined correctly." 1>&2
+ echo " We cannot execute $JAVACMD" 1>&2
+ exit 1
+fi
+
+if [ -z "$REPO" ]
+then
+ REPO="$BASEDIR"/lib
+fi
+
+CLASSPATH=$CLASSPATH_PREFIX:"$HADOOP_HOME"/conf:/etc/hadoop/conf:"$BASEDIR"/etc:$1
+
+for f in ${REPO}/*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin; then
+ [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
+ [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
+ [ -n "$HOME" ] && HOME=`cygpath --path --windows "$HOME"`
+ [ -n "$BASEDIR" ] && BASEDIR=`cygpath --path --windows "$BASEDIR"`
+ [ -n "$REPO" ] && REPO=`cygpath --path --windows "$REPO"`
+fi
+
+exec "$JAVACMD" $JAVA_OPTS \
+ -classpath "$CLASSPATH" \
+ -Dapp.name="pregelixcc" \
+ -Dapp.pid="$$" \
+ -Dapp.repo="$REPO" \
+ -Dapp.home="$BASEDIR" \
+ -Dbasedir="$BASEDIR" \
+ edu.uci.ics.hyracks.control.cc.CCDriver \
+ "$@"
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/pregelixnc b/pregelix/pregelix-core/src/main/resources/scripts/pregelixnc
new file mode 100755
index 0000000..c01b4b4
--- /dev/null
+++ b/pregelix/pregelix-core/src/main/resources/scripts/pregelixnc
@@ -0,0 +1,115 @@
+#!/bin/sh
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
+# resolve links - $0 may be a softlink
+PRG="$0"
+
+while [ -h "$PRG" ]; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`/"$link"
+ fi
+done
+
+PRGDIR=`dirname "$PRG"`
+BASEDIR=`cd "$PRGDIR/.." >/dev/null; pwd`
+
+
+
+# OS specific support. $var _must_ be set to either true or false.
+cygwin=false;
+darwin=false;
+case "`uname`" in
+ CYGWIN*) cygwin=true ;;
+ Darwin*) darwin=true
+ if [ -z "$JAVA_VERSION" ] ; then
+ JAVA_VERSION="CurrentJDK"
+ else
+ echo "Using Java version: $JAVA_VERSION"
+ fi
+ if [ -z "$JAVA_HOME" ] ; then
+ JAVA_HOME=/System/Library/Frameworks/JavaVM.framework/Versions/${JAVA_VERSION}/Home
+ fi
+ ;;
+esac
+
+if [ -z "$JAVA_HOME" ] ; then
+ if [ -r /etc/gentoo-release ] ; then
+ JAVA_HOME=`java-config --jre-home`
+ fi
+fi
+
+# For Cygwin, ensure paths are in UNIX format before anything is touched
+if $cygwin ; then
+ [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"`
+ [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --unix "$CLASSPATH"`
+fi
+
+# If a specific java binary isn't specified search for the standard 'java' binary
+if [ -z "$JAVACMD" ] ; then
+ if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ else
+ JAVACMD=`which java`
+ fi
+fi
+
+if [ ! -x "$JAVACMD" ] ; then
+ echo "Error: JAVA_HOME is not defined correctly." 1>&2
+ echo " We cannot execute $JAVACMD" 1>&2
+ exit 1
+fi
+
+if [ -z "$REPO" ]
+then
+ REPO="$BASEDIR"/lib
+fi
+
+CLASSPATH=$CLASSPATH_PREFIX:"$HADOOP_HOME"/conf:/etc/hadoop/conf:"$BASEDIR"/etc:$1
+
+for f in ${REPO}/*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+done
+
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin; then
+ [ -n "$CLASSPATH" ] && CLASSPATH=`cygpath --path --windows "$CLASSPATH"`
+ [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"`
+ [ -n "$HOME" ] && HOME=`cygpath --path --windows "$HOME"`2
+ [ -n "$BASEDIR" ] && BASEDIR=`cygpath --path --windows "$BASEDIR"`
+ [ -n "$REPO" ] && REPO=`cygpath --path --windows "$REPO"`
+fi
+
+exec "$JAVACMD" $JAVA_OPTS \
+ -classpath "$CLASSPATH" \
+ -Dapp.name="pregelixnc" \
+ -Dapp.pid="$$" \
+ -Dapp.repo="$REPO" \
+ -Dapp.home="$BASEDIR" \
+ -Dbasedir="$BASEDIR" \
+ edu.uci.ics.hyracks.control.nc.NCDriver \
+ -app-nc-main-class edu.uci.ics.pregelix.runtime.bootstrap.NCApplicationEntryPoint "$@"
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startAllNCs.sh b/pregelix/pregelix-core/src/main/resources/scripts/startAllNCs.sh
index d30da26..821be00 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/startAllNCs.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startAllNCs.sh
@@ -1,3 +1,21 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
PREGELIX_PATH=`pwd`
for i in `cat conf/slaves`
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startCluster.sh b/pregelix/pregelix-core/src/main/resources/scripts/startCluster.sh
index a0c2063..cddf9a4 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/startCluster.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startCluster.sh
@@ -1,3 +1,21 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
bin/startcc.sh
sleep 5
bin/startAllNCs.sh
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startDebugNc.sh b/pregelix/pregelix-core/src/main/resources/scripts/startDebugNc.sh
index fe6cf27..bb43686 100755
--- a/pregelix/pregelix-core/src/main/resources/scripts/startDebugNc.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startDebugNc.sh
@@ -1,3 +1,21 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
hostname
#Get the IP address of the cc
@@ -40,11 +58,11 @@
#Set JAVA_OPTS
export JAVA_OPTS=$NCJAVA_OPTS2
-cd $HYRACKS_HOME
-HYRACKS_HOME=`pwd`
+PREGELIX_HOME=`pwd`
#Enter the temp dir
cd $NCTMP_DIR2
#Launch hyracks nc
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS2}" &> $NCLOGS_DIR2/$NODEID.log &
+#echo ${PREGELIX_HOME}/bin/pregelixnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -result-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}"
+${PREGELIX_HOME}/bin/pregelixnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -result-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS2}" &> $NCLOGS_DIR2/$NODEID.log &
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startcc.sh b/pregelix/pregelix-core/src/main/resources/scripts/startcc.sh
index 133b604..282a6e7 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/startcc.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startcc.sh
@@ -1,4 +1,21 @@
#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
hostname
#Import cluster properties
@@ -20,12 +37,15 @@
export JAVA_HOME=$JAVA_HOME
export JAVA_OPTS=$CCJAVA_OPTS
+PREGELIX_HOME=`pwd`
-chmod -R 755 $HYRACKS_HOME
+#Enter the temp dir
+cd $CCTMP_DIR
+
if [ -f "conf/topology.xml" ]; then
#Launch hyracks cc script with topology
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 -cluster-topology "conf/topology.xml" &> $CCLOGS_DIR/cc.log &
+${PREGELIX_HOME}/bin/pregelixcc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 -cluster-topology "conf/topology.xml" &> $CCLOGS_DIR/cc.log &
else
#Launch hyracks cc script without topology
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyrackscc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 &> $CCLOGS_DIR/cc.log &
+${PREGELIX_HOME}/bin/pregelixcc -client-net-ip-address $CCHOST -cluster-net-ip-address $CCHOST -client-net-port $CC_CLIENTPORT -cluster-net-port $CC_CLUSTERPORT -max-heartbeat-lapse-periods 999999 -default-max-job-attempts 0 -job-history-size 0 &> $CCLOGS_DIR/cc.log &
fi
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/startnc.sh b/pregelix/pregelix-core/src/main/resources/scripts/startnc.sh
index b059aad..970c30b 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/startnc.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/startnc.sh
@@ -1,3 +1,21 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
hostname
MY_NAME=`hostname`
@@ -39,11 +57,10 @@
#Set JAVA_OPTS
export JAVA_OPTS=$NCJAVA_OPTS
-cd $HYRACKS_HOME
-HYRACKS_HOME=`pwd`
+PREGELIX_HOME=`pwd`
#Enter the temp dir
cd $NCTMP_DIR
#Launch hyracks nc
-$HYRACKS_HOME/hyracks-server/target/appassembler/bin/hyracksnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -result-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}" &> $NCLOGS_DIR/$NODEID.log &
+${PREGELIX_HOME}/bin/pregelixnc -cc-host $CCHOST -cc-port $CC_CLUSTERPORT -cluster-net-ip-address $IPADDR -data-ip-address $IPADDR -result-ip-address $IPADDR -node-id $NODEID -iodevices "${IO_DIRS}" &> $NCLOGS_DIR/$NODEID.log &
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/stopAllNCs.sh b/pregelix/pregelix-core/src/main/resources/scripts/stopAllNCs.sh
index 12367c1..dc576d1 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/stopAllNCs.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/stopAllNCs.sh
@@ -1,3 +1,21 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
PREGELIX_PATH=`pwd`
for i in `cat conf/slaves`
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/stopCluster.sh b/pregelix/pregelix-core/src/main/resources/scripts/stopCluster.sh
index 4889934..6b829aa 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/stopCluster.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/stopCluster.sh
@@ -1,3 +1,21 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
bin/stopAllNCs.sh
sleep 2
bin/stopcc.sh
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/stopcc.sh b/pregelix/pregelix-core/src/main/resources/scripts/stopcc.sh
index c2f525a..8f94fb7 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/stopcc.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/stopcc.sh
@@ -1,8 +1,37 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
hostname
. conf/cluster.properties
#Kill process
-PID=`ps -ef|grep ${USER}|grep java|grep hyracks|awk '{print $2}'`
+PID=`ps -ef|grep ${USER}|grep java|grep 'Dapp.name=pregelixcc'|awk '{print $2}'`
+
+if [ "$PID" == "" ]; then
+ PID=`ps -ef|grep ${USER}|grep java|grep 'hyracks'|awk '{print $2}'`
+fi
+
+if [ "$PID" == "" ]; then
+ USERID=`id | sed 's/^uid=//;s/(.*$//'`
+ PID=`ps -ef|grep ${USERID}|grep java|grep 'Dapp.name=pregelixcc'|awk '{print $2}'`
+fi
+
echo $PID
kill -9 $PID
diff --git a/pregelix/pregelix-core/src/main/resources/scripts/stopnc.sh b/pregelix/pregelix-core/src/main/resources/scripts/stopnc.sh
index 35c4794..26ef089 100644
--- a/pregelix/pregelix-core/src/main/resources/scripts/stopnc.sh
+++ b/pregelix/pregelix-core/src/main/resources/scripts/stopnc.sh
@@ -1,8 +1,26 @@
+#!/bin/bash
+#
+#------------------------------------------------------------------------
+# Copyright 2009-2013 by The Regents of the University of California
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# you may obtain a copy of the License from
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ------------------------------------------------------------------------
+#
+
hostname
. conf/cluster.properties
#Kill process
-PID=`ps -ef|grep ${USER}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
+PID=`ps -ef|grep ${USER}|grep java|grep 'Dapp.name=pregelixnc'|awk '{print $2}'`
if [ "$PID" == "" ]; then
PID=`ps -ef|grep ${USER}|grep java|grep 'hyracks'|awk '{print $2}'`
@@ -10,7 +28,7 @@
if [ "$PID" == "" ]; then
USERID=`id | sed 's/^uid=//;s/(.*$//'`
- PID=`ps -ef|grep ${USERID}|grep java|grep 'Dapp.name=hyracksnc'|awk '{print $2}'`
+ PID=`ps -ef|grep ${USERID}|grep java|grep 'Dapp.name=pregelixnc'|awk '{print $2}'`
fi
echo $PID
diff --git a/pregelix/pregelix-dataflow-std-base/pom.xml b/pregelix/pregelix-dataflow-std-base/pom.xml
index 1ebc65d..1209496 100644
--- a/pregelix/pregelix-dataflow-std-base/pom.xml
+++ b/pregelix/pregelix-dataflow-std-base/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
@@ -74,14 +74,14 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IAggregateFunctionFactory.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IAggregateFunctionFactory.java
index 4be0bed..dc75b07 100644
--- a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IAggregateFunctionFactory.java
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IAggregateFunctionFactory.java
@@ -16,9 +16,11 @@
import java.io.Serializable;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
public interface IAggregateFunctionFactory extends Serializable {
- public IAggregateFunction createAggregateFunction(IDataOutputProvider provider) throws HyracksException;
+ public IAggregateFunction createAggregateFunction(IHyracksTaskContext ctx,
+ IDataOutputProvider provider) throws HyracksException;
}
\ No newline at end of file
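With IHyracksTaskContext threaded into createAggregateFunction, an aggregate function can consult the frame size and the job's class loader at creation time. A minimal implementing sketch under the new signature (MyAggregateFunction is hypothetical, not part of the patch):

    import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
    import edu.uci.ics.hyracks.api.exceptions.HyracksException;
    import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
    import edu.uci.ics.pregelix.dataflow.std.base.IAggregateFunction;
    import edu.uci.ics.pregelix.dataflow.std.base.IAggregateFunctionFactory;

    public class MyAggregateFunctionFactory implements IAggregateFunctionFactory {
        private static final long serialVersionUID = 1L;

        @Override
        public IAggregateFunction createAggregateFunction(IHyracksTaskContext ctx,
                IDataOutputProvider provider) throws HyracksException {
            // MyAggregateFunction is a hypothetical IAggregateFunction implementation
            // that can now use ctx (class loader, frame size) during setup.
            return new MyAggregateFunction(ctx, provider);
        }
    }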
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRecordDescriptorFactory.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRecordDescriptorFactory.java
index e7de650..7454f2d 100644
--- a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRecordDescriptorFactory.java
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/IRecordDescriptorFactory.java
@@ -16,11 +16,12 @@
import java.io.Serializable;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
public interface IRecordDescriptorFactory extends Serializable {
- public RecordDescriptor createRecordDescriptor() throws HyracksDataException;
+ public RecordDescriptor createRecordDescriptor(IHyracksTaskContext ctx) throws HyracksDataException;
}
diff --git a/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/ISerializerDeserializerFactory.java b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/ISerializerDeserializerFactory.java
new file mode 100644
index 0000000..16b067a
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std-base/src/main/java/edu/uci/ics/pregelix/dataflow/std/base/ISerializerDeserializerFactory.java
@@ -0,0 +1,11 @@
+package edu.uci.ics.pregelix.dataflow.std.base;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+
+public interface ISerializerDeserializerFactory<T> extends Serializable {
+
+ public ISerializerDeserializer<T> getSerializerDeserializer();
+
+}
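A plausible implementation of the new interface wraps DatatypeHelper the same way DataflowUtils does above; the patch's WritableSerializerDeserializerFactory presumably looks similar, but this sketch is illustrative rather than copied from it (it also assumes the pregelix DatatypeHelper keeps the generic signature of its Hyracks counterpart):

    import org.apache.hadoop.io.Writable;

    import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
    import edu.uci.ics.pregelix.dataflow.std.base.ISerializerDeserializerFactory;
    import edu.uci.ics.pregelix.runtime.touchpoint.DatatypeHelper;

    public class WritableSerDeFactorySketch<T extends Writable> implements ISerializerDeserializerFactory<T> {
        private static final long serialVersionUID = 1L;
        private final Class<T> clazz;

        public WritableSerDeFactorySketch(Class<T> clazz) {
            this.clazz = clazz;
        }

        @Override
        public ISerializerDeserializer<T> getSerializerDeserializer() {
            // Create the serde lazily, on whichever node asks for it.
            return DatatypeHelper.createSerializerDeserializer(clazz);
        }
    }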
diff --git a/pregelix/pregelix-dataflow-std/pom.xml b/pregelix/pregelix-dataflow-std/pom.xml
index 884577e..585e82b 100644
--- a/pregelix/pregelix-dataflow-std/pom.xml
+++ b/pregelix/pregelix-dataflow-std/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
@@ -74,77 +74,77 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-dataflow-std-base</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-hdfs-core</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-ipc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorNodePushable.java
index ff95e52..1fb5aca 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorNodePushable.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/BTreeSearchFunctionUpdateOperatorNodePushable.java
@@ -43,7 +43,9 @@
import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+import edu.uci.ics.pregelix.dataflow.util.CopyUpdateUtil;
import edu.uci.ics.pregelix.dataflow.util.FunctionProxy;
+import edu.uci.ics.pregelix.dataflow.util.SearchKeyTupleReference;
import edu.uci.ics.pregelix.dataflow.util.UpdateBuffer;
public class BTreeSearchFunctionUpdateOperatorNodePushable extends AbstractUnaryInputOperatorNodePushable {
@@ -74,6 +76,7 @@
private final FunctionProxy functionProxy;
private ArrayTupleBuilder cloneUpdateTb;
private final UpdateBuffer updateBuffer;
+ private final SearchKeyTupleReference tempTupleReference = new SearchKeyTupleReference();
public BTreeSearchFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
@@ -147,21 +150,8 @@
functionProxy.functionCall(tuple, cloneUpdateTb);
//doing clone update
- if (cloneUpdateTb.getSize() > 0) {
- if (!updateBuffer.appendTuple(cloneUpdateTb)) {
- //release the cursor/latch
- cursor.close();
- //batch update
- updateBuffer.updateBTree(indexAccessor);
-
- //search again
- cursor.reset();
- rangePred.setLowKey(tuple, true);
- rangePred.setHighKey(highKey, highKeyInclusive);
- indexAccessor.search(cursor, rangePred);
- }
- }
- cloneUpdateTb.reset();
+ CopyUpdateUtil.copyUpdate(tempTupleReference, tuple, updateBuffer, cloneUpdateTb, indexAccessor, cursor,
+ rangePred);
}
}
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FunctionCallOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FunctionCallOperatorDescriptor.java
index 4cbd6c4..24d28b0 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FunctionCallOperatorDescriptor.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/FunctionCallOperatorDescriptor.java
@@ -66,7 +66,7 @@
@Override
public void open() throws HyracksDataException {
rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)
- : inputRdFactory.createRecordDescriptor();
+ : inputRdFactory.createRecordDescriptor(ctx);
frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0);
ctxCL = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorNodePushable.java
index 61e4649..427ffe9 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorNodePushable.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopJoinFunctionUpdateOperatorNodePushable.java
@@ -43,7 +43,9 @@
import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+import edu.uci.ics.pregelix.dataflow.util.CopyUpdateUtil;
import edu.uci.ics.pregelix.dataflow.util.FunctionProxy;
+import edu.uci.ics.pregelix.dataflow.util.SearchKeyTupleReference;
import edu.uci.ics.pregelix.dataflow.util.UpdateBuffer;
public class IndexNestedLoopJoinFunctionUpdateOperatorNodePushable extends AbstractUnaryInputOperatorNodePushable {
@@ -69,6 +71,7 @@
private final FunctionProxy functionProxy;
private ArrayTupleBuilder cloneUpdateTb;
private final UpdateBuffer updateBuffer;
+ private final SearchKeyTupleReference tempTupleReference = new SearchKeyTupleReference();
public IndexNestedLoopJoinFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
@@ -163,21 +166,9 @@
*/
functionProxy.functionCall(leftAccessor, tIndex, tupleRef, cloneUpdateTb);
- if (cloneUpdateTb.getSize() > 0) {
- if (!updateBuffer.appendTuple(cloneUpdateTb)) {
- //release the cursor/latch
- cursor.close();
- //batch update
- updateBuffer.updateBTree(indexAccessor);
-
- //search again
- cursor.reset();
- rangePred.setLowKey(tupleRef, true);
- rangePred.setHighKey(highKey, highKeyInclusive);
- indexAccessor.search(cursor, rangePred);
- }
- }
- cloneUpdateTb.reset();
+ //doing copy update
+ CopyUpdateUtil.copyUpdate(tempTupleReference, tupleRef, updateBuffer, cloneUpdateTb, indexAccessor, cursor,
+ rangePred);
}
}
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable.java
index 5ca5382..0c13a09 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable.java
@@ -45,7 +45,9 @@
import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+import edu.uci.ics.pregelix.dataflow.util.CopyUpdateUtil;
import edu.uci.ics.pregelix.dataflow.util.FunctionProxy;
+import edu.uci.ics.pregelix.dataflow.util.SearchKeyTupleReference;
import edu.uci.ics.pregelix.dataflow.util.UpdateBuffer;
public class IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable extends
@@ -79,7 +81,8 @@
private final IFrameWriter[] writers;
private final FunctionProxy functionProxy;
private ArrayTupleBuilder cloneUpdateTb;
- private UpdateBuffer updateBuffer;
+ private final UpdateBuffer updateBuffer;
+ private final SearchKeyTupleReference tempTupleReference = new SearchKeyTupleReference();
public IndexNestedLoopRightOuterJoinFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
@@ -184,32 +187,6 @@
}
}
- //for the join match casesos
- private void writeResults(IFrameTupleAccessor leftAccessor, int tIndex, ITupleReference frameTuple)
- throws Exception {
- /**
- * function call
- */
- functionProxy.functionCall(leftAccessor, tIndex, frameTuple, cloneUpdateTb);
-
- //doing clone update
- if (cloneUpdateTb.getSize() > 0) {
- if (!updateBuffer.appendTuple(cloneUpdateTb)) {
- //release the cursor/latch
- cursor.close();
- //batch update
- updateBuffer.updateBTree(indexAccessor);
-
- //search again and recover the cursor
- cursor.reset();
- rangePred.setLowKey(frameTuple, true);
- rangePred.setHighKey(null, true);
- indexAccessor.search(cursor, rangePred);
- }
- cloneUpdateTb.reset();
- }
- }
-
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
accessor.reset(buffer);
@@ -289,6 +266,19 @@
return lowKeySearchCmp.compare(left, right);
}
+ //for the join match cases
+ private void writeResults(IFrameTupleAccessor leftAccessor, int tIndex, ITupleReference frameTuple)
+ throws Exception {
+ /**
+ * function call
+ */
+ functionProxy.functionCall(leftAccessor, tIndex, frameTuple, cloneUpdateTb);
+
+ //doing clone update
+ CopyUpdateUtil.copyUpdate(tempTupleReference, frameTuple, updateBuffer, cloneUpdateTb, indexAccessor, cursor,
+ rangePred);
+ }
+
/** write result for outer case */
private void writeResults(ITupleReference frameTuple) throws Exception {
/**
@@ -297,21 +287,8 @@
functionProxy.functionCall(nullTupleBuilder, frameTuple, cloneUpdateTb);
//doing clone update
- if (cloneUpdateTb.getSize() > 0) {
- if (!updateBuffer.appendTuple(cloneUpdateTb)) {
- //release the cursor/latch
- cursor.close();
- //batch update
- updateBuffer.updateBTree(indexAccessor);
-
- //search again and recover the cursor
- cursor.reset();
- rangePred.setLowKey(frameTuple, true);
- rangePred.setHighKey(null, true);
- indexAccessor.search(cursor, rangePred);
- }
- cloneUpdateTb.reset();
- }
+ CopyUpdateUtil.copyUpdate(tempTupleReference, frameTuple, updateBuffer, cloneUpdateTb, indexAccessor, cursor,
+ rangePred);
}
@Override
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java
index 160324e..1e5c2ea 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java
@@ -43,7 +43,9 @@
import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
+import edu.uci.ics.pregelix.dataflow.util.CopyUpdateUtil;
import edu.uci.ics.pregelix.dataflow.util.FunctionProxy;
+import edu.uci.ics.pregelix.dataflow.util.SearchKeyTupleReference;
import edu.uci.ics.pregelix.dataflow.util.UpdateBuffer;
public class IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable extends AbstractUnaryInputOperatorNodePushable {
@@ -72,6 +74,7 @@
private final FunctionProxy functionProxy;
private ArrayTupleBuilder cloneUpdateTb;
private UpdateBuffer updateBuffer;
+ private final SearchKeyTupleReference tempTupleReference = new SearchKeyTupleReference();
public IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
@@ -235,21 +238,8 @@
functionProxy.functionCall(frameTuple, cloneUpdateTb);
//doing clone update
- if (cloneUpdateTb.getSize() > 0) {
- if (!updateBuffer.appendTuple(cloneUpdateTb)) {
- //release the cursor/latch
- cursor.close();
- //batch update
- updateBuffer.updateBTree(indexAccessor);
-
- //search again
- cursor.reset();
- rangePred.setLowKey(frameTuple, true);
- rangePred.setHighKey(null, true);
- indexAccessor.search(cursor, rangePred);
- }
- cloneUpdateTb.reset();
- }
+ CopyUpdateUtil.copyUpdate(tempTupleReference, frameTuple, updateBuffer, cloneUpdateTb, indexAccessor, cursor,
+ rangePred);
}
/** write the left result */
@@ -258,21 +248,8 @@
functionProxy.functionCall(leftAccessor, tIndex, frameTuple, cloneUpdateTb);
//doing clone update
- if (cloneUpdateTb.getSize() > 0) {
- if (!updateBuffer.appendTuple(cloneUpdateTb)) {
- //release the cursor/latch
- cursor.close();
- //batch update
- updateBuffer.updateBTree(indexAccessor);
-
- //search again
- cursor.reset();
- rangePred.setLowKey(frameTuple, true);
- rangePred.setHighKey(null, true);
- indexAccessor.search(cursor, rangePred);
- }
- cloneUpdateTb.reset();
- }
+ CopyUpdateUtil.copyUpdate(tempTupleReference, frameTuple, updateBuffer, cloneUpdateTb, indexAccessor, cursor,
+ rangePred);
}
@Override
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/CopyUpdateUtil.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/CopyUpdateUtil.java
new file mode 100644
index 0000000..30df916
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/CopyUpdateUtil.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.pregelix.dataflow.util;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
+
+public class CopyUpdateUtil {
+
+ public static void copyUpdate(SearchKeyTupleReference tempTupleReference, ITupleReference frameTuple,
+ UpdateBuffer updateBuffer, ArrayTupleBuilder cloneUpdateTb, ITreeIndexAccessor indexAccessor,
+ ITreeIndexCursor cursor, RangePredicate rangePred) throws HyracksDataException, IndexException {
+ if (cloneUpdateTb.getSize() > 0) {
+ int[] fieldEndOffsets = cloneUpdateTb.getFieldEndOffsets();
+ int srcStart = fieldEndOffsets[0];
+ int srcLen = fieldEndOffsets[1] - fieldEndOffsets[0]; // the updated vertex size
+ int frSize = frameTuple.getFieldLength(1); // the vertex binary size in the leaf page
+ if (srcLen <= frSize) {
+ //do an in-place update if possible, to save the "real update" overhead
+ System.arraycopy(cloneUpdateTb.getByteArray(), srcStart, frameTuple.getFieldData(1),
+ frameTuple.getFieldStart(1), srcLen);
+ cloneUpdateTb.reset();
+ return;
+ }
+ if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+ tempTupleReference.reset(frameTuple.getFieldData(0), frameTuple.getFieldStart(0),
+ frameTuple.getFieldLength(0));
+ //release the cursor/latch
+ cursor.close();
+ //batch update
+ updateBuffer.updateBTree(indexAccessor);
+ //try appending the to-be-updated tuple again
+ if (!updateBuffer.appendTuple(cloneUpdateTb)) {
+ throw new HyracksDataException("cannot append tuple builder!");
+ }
+ //search again and recover the cursor
+ cursor.reset();
+ rangePred.setLowKey(tempTupleReference, false);
+ rangePred.setHighKey(null, true);
+ indexAccessor.search(cursor, rangePred);
+ }
+ cloneUpdateTb.reset();
+ }
+ }
+}
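CopyUpdateUtil assumes a two-field tuple layout: field 0 is the vertex id (the search key) and field 1 is the vertex payload, so fieldEndOffsets[0] is both the end of the key and the start of the payload. If the new payload fits the existing slot on the leaf page it is overwritten in place; otherwise the update is buffered, and a full buffer forces a latch release, a batch flush, and a re-seek from a copied key. A standalone sketch of the offset arithmetic; the class and values are hypothetical:

    package edu.uci.ics.pregelix.dataflow.util;

    import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

    // Hypothetical sketch, not part of this patch: builds the two-field
    // layout copyUpdate() assumes (field 0 = vertex id, field 1 = payload)
    // and recovers the payload offset and length the same way.
    public class CopyUpdateLayoutSketch {
        public static void main(String[] args) throws Exception {
            ArrayTupleBuilder tb = new ArrayTupleBuilder(2);
            tb.getDataOutput().writeLong(42L); // field 0: vertex id
            tb.addFieldEndOffset();
            tb.getDataOutput().writeInt(7);    // field 1: vertex payload
            tb.addFieldEndOffset();
            int[] ends = tb.getFieldEndOffsets();
            int srcStart = ends[0];            // payload starts where the key ends
            int srcLen = ends[1] - ends[0];    // payload length, compared to the slot size
            System.out.println("payload offset=" + srcStart + ", length=" + srcLen);
        }
    }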
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/FunctionProxy.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/FunctionProxy.java
index 99bca1a..f75dab2 100644
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/FunctionProxy.java
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/FunctionProxy.java
@@ -56,7 +56,7 @@
* @throws HyracksDataException
*/
public void functionOpen() throws HyracksDataException {
- inputRd = inputRdFactory.createRecordDescriptor();
+ inputRd = inputRdFactory.createRecordDescriptor(ctx);
tupleDe = new TupleDeserializer(inputRd);
ctxCL = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/SearchKeyTupleReference.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/SearchKeyTupleReference.java
new file mode 100644
index 0000000..86fa24a
--- /dev/null
+++ b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/util/SearchKeyTupleReference.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.dataflow.util;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public class SearchKeyTupleReference implements ITupleReference {
+
+ private byte[] copiedData;
+ private int length;
+
+ public void reset(byte[] data, int start, int len) {
+ if (copiedData == null) {
+ copiedData = new byte[len];
+ }
+ if (copiedData.length < len) {
+ copiedData = new byte[len];
+ }
+ System.arraycopy(data, start, copiedData, 0, len);
+ length = len;
+ }
+
+ @Override
+ public int getFieldCount() {
+ return 1;
+ }
+
+ @Override
+ public byte[] getFieldData(int fIdx) {
+ return copiedData;
+ }
+
+ @Override
+ public int getFieldStart(int fIdx) {
+ return 0;
+ }
+
+ @Override
+ public int getFieldLength(int fIdx) {
+ return length;
+ }
+
+}
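The point of copying the key bytes is that cursor.close() in CopyUpdateUtil releases the leaf page latch, after which the frame backing the original tuple may be reused; re-seeding the range predicate from a private copy keeps the re-seek safe. A tiny hypothetical demonstration that the copy is decoupled from its source buffer:

    package edu.uci.ics.pregelix.dataflow.util;

    // Hypothetical sketch, not part of this patch: shows that the copied key
    // is decoupled from the buffer it came from, which is what makes the
    // re-seek after cursor.close() safe.
    public class SearchKeyCopySketch {
        public static void main(String[] args) {
            byte[] frame = new byte[] { 1, 2, 3, 4, 5 };
            SearchKeyTupleReference key = new SearchKeyTupleReference();
            key.reset(frame, 1, 3); // copy bytes {2, 3, 4}
            frame[2] = 99;          // simulate the page being reused after close()
            System.out.println(key.getFieldData(0)[1]); // still prints 3
        }
    }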
diff --git a/pregelix/pregelix-dataflow/pom.xml b/pregelix/pregelix-dataflow/pom.xml
index 8e2be08..4c0da76 100644
--- a/pregelix/pregelix-dataflow/pom.xml
+++ b/pregelix/pregelix-dataflow/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
@@ -74,68 +74,68 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-dataflow-std-base</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-ipc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/FinalAggregateOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/FinalAggregateOperatorDescriptor.java
index eda7754..71592ee 100644
--- a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/FinalAggregateOperatorDescriptor.java
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/FinalAggregateOperatorDescriptor.java
@@ -56,11 +56,11 @@
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
return new AbstractUnaryInputSinkOperatorNodePushable() {
- private Configuration conf = confFactory.createConfiguration();
+ private Configuration conf = confFactory.createConfiguration(ctx);
@SuppressWarnings("rawtypes")
private GlobalAggregator aggregator = BspUtils.createGlobalAggregator(conf);
private FrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(),
- inputRdFactory.createRecordDescriptor());
+ inputRdFactory.createRecordDescriptor(ctx));
private ByteBufferInputStream inputStream = new ByteBufferInputStream();
private DataInput input = new DataInputStream(inputStream);
private Writable partialAggregateValue = BspUtils.createFinalAggregateValue(conf);
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/HDFSFileWriteOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/HDFSFileWriteOperatorDescriptor.java
index 2402748..1d9c778 100644
--- a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/HDFSFileWriteOperatorDescriptor.java
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/HDFSFileWriteOperatorDescriptor.java
@@ -75,14 +75,15 @@
@Override
public void open() throws HyracksDataException {
rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)
- : inputRdFactory.createRecordDescriptor();
+ : inputRdFactory.createRecordDescriptor(ctx);
frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0);
ctxCL = Thread.currentThread().getContextClassLoader();
Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
- conf = confFactory.createConfiguration();
+ conf = confFactory.createConfiguration(ctx);
VertexOutputFormat outputFormat = BspUtils.createVertexOutputFormat(conf);
context = ctxFactory.createContext(conf, partition);
+ context.getConfiguration().setClassLoader(ctx.getJobletContext().getClassLoader());
try {
vertexWriter = outputFormat.createVertexWriter(context);
} catch (InterruptedException e) {
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/MaterializingReadOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/MaterializingReadOperatorDescriptor.java
index b1bb555..9a87f02 100644
--- a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/MaterializingReadOperatorDescriptor.java
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/MaterializingReadOperatorDescriptor.java
@@ -66,14 +66,7 @@
} catch (Exception e) {
writer.fail();
throw new HyracksDataException(e);
- } finally {
- /**
- * remove last iteration's state
- */
- IterationUtils.removeIterationState(ctx, partition);
- writer.close();
}
- complete = true;
}
}
@@ -84,7 +77,12 @@
@Override
public void close() throws HyracksDataException {
-
+ /**
+ * remove last iteration's state
+ */
+ IterationUtils.removeIterationState(ctx, partition);
+ writer.close();
+ complete = true;
}
};
}
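This hunk moves the per-iteration cleanup out of the failure path and into close(), relying on the push-runtime contract that close() is invoked exactly once after the final frame (or after fail()). A self-contained sketch of that contract; the interface and driver below are illustrative stand-ins, not the Hyracks API:

    // Hypothetical sketch, not part of this patch: the sink lifecycle the
    // hunk above relies on: zero or more nextFrame() calls, fail() only on
    // error, and exactly one close() at the end, which is therefore the
    // right home for idempotent per-iteration cleanup.
    public class LifecycleSketch {
        interface Sink {
            void nextFrame(int frame) throws Exception;
            void fail();
            void close();
        }

        static void drive(Sink sink, int[] frames) {
            try {
                for (int f : frames) {
                    sink.nextFrame(f);
                }
            } catch (Exception e) {
                sink.fail(); // signal the failure downstream
            } finally {
                sink.close(); // always runs last, exactly once
            }
        }

        public static void main(String[] args) {
            drive(new Sink() {
                public void nextFrame(int frame) { System.out.println("frame " + frame); }
                public void fail() { System.out.println("fail"); }
                public void close() { System.out.println("close: remove iteration state, close writer"); }
            }, new int[] { 1, 2, 3 });
        }
    }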
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/TerminationStateWriterOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/TerminationStateWriterOperatorDescriptor.java
index 88a0dda..f54d176 100644
--- a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/TerminationStateWriterOperatorDescriptor.java
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/TerminationStateWriterOperatorDescriptor.java
@@ -45,10 +45,10 @@
}
@Override
- public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+ public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
return new AbstractUnaryInputSinkOperatorNodePushable() {
- private Configuration conf = confFactory.createConfiguration();
+ private Configuration conf = confFactory.createConfiguration(ctx);
private boolean terminate = true;
@Override
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexFileScanOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexFileScanOperatorDescriptor.java
index 0da7baf..343ba7e 100644
--- a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexFileScanOperatorDescriptor.java
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexFileScanOperatorDescriptor.java
@@ -81,15 +81,12 @@
final List<FileSplit> splits = splitsFactory.getSplits();
return new AbstractUnaryOutputSourceOperatorNodePushable() {
- private ClassLoader ctxCL;
private ContextFactory ctxFactory = new ContextFactory();
@Override
public void initialize() throws HyracksDataException {
- ctxCL = Thread.currentThread().getContextClassLoader();
try {
- Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
- Configuration conf = confFactory.createConfiguration();
+ Configuration conf = confFactory.createConfiguration(ctx);
writer.open();
for (int i = 0; i < scheduledLocations.length; i++) {
if (scheduledLocations[i].equals(ctx.getJobletContext().getApplicationContext().getNodeId())) {
@@ -109,8 +106,6 @@
writer.close();
} catch (Exception e) {
throw new HyracksDataException(e);
- } finally {
- Thread.currentThread().setContextClassLoader(ctxCL);
}
}
@@ -135,10 +130,11 @@
VertexInputFormat vertexInputFormat = BspUtils.createVertexInputFormat(conf);
InputSplit split = splits.get(splitId);
TaskAttemptContext mapperContext = ctxFactory.createContext(conf, splitId);
+ mapperContext.getConfiguration().setClassLoader(ctx.getJobletContext().getClassLoader());
VertexReader vertexReader = vertexInputFormat.createVertexReader(split, mapperContext);
vertexReader.initialize(split, mapperContext);
- Vertex readerVertex = (Vertex) BspUtils.createVertex(conf);
+ Vertex readerVertex = (Vertex) BspUtils.createVertex(mapperContext.getConfiguration());
ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldSize);
DataOutput dos = tb.getDataOutput();
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexWriteOperatorDescriptor.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexWriteOperatorDescriptor.java
index d7cbb3a..5da0239 100644
--- a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexWriteOperatorDescriptor.java
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/VertexWriteOperatorDescriptor.java
@@ -64,7 +64,7 @@
@Override
public void open() throws HyracksDataException {
rd0 = inputRdFactory == null ? recordDescProvider.getInputRecordDescriptor(getActivityId(), 0)
- : inputRdFactory.createRecordDescriptor();
+ : inputRdFactory.createRecordDescriptor(ctx);
frameDeserializer = new FrameDeserializer(ctx.getFrameSize(), rd0);
try {
outputWriter = new PrintWriter(new OutputStreamWriter(new FileOutputStream(splits[partition]
diff --git a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/base/IConfigurationFactory.java b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/base/IConfigurationFactory.java
index b31f376..2e58196 100644
--- a/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/base/IConfigurationFactory.java
+++ b/pregelix/pregelix-dataflow/src/main/java/edu/uci/ics/pregelix/dataflow/base/IConfigurationFactory.java
@@ -18,10 +18,11 @@
import org.apache.hadoop.conf.Configuration;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
public interface IConfigurationFactory extends Serializable {
- public Configuration createConfiguration() throws HyracksDataException;
+ public Configuration createConfiguration(IHyracksTaskContext ctx) throws HyracksDataException;
}
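A sketch of a factory honoring the new signature, deserializing a Hadoop Configuration shipped as Writable bytes and wiring in the per-job class loader from the task context (as the operators above do); the class name and the byte-array wire format are assumptions, not this tree's ConfigurationFactory:

    package edu.uci.ics.pregelix.dataflow.base;

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;

    import org.apache.hadoop.conf.Configuration;

    import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
    import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;

    // Hypothetical sketch, not part of this patch: rebuilds the job conf per
    // task and installs the per-job class loader from the task context so
    // user classes named in the conf resolve correctly.
    public class BytesConfigurationFactory implements IConfigurationFactory {
        private static final long serialVersionUID = 1L;
        private final byte[] data;

        public BytesConfigurationFactory(byte[] data) {
            this.data = data; // produced elsewhere via Configuration.write(...)
        }

        @Override
        public Configuration createConfiguration(IHyracksTaskContext ctx) throws HyracksDataException {
            try {
                Configuration conf = new Configuration();
                conf.setClassLoader(ctx.getJobletContext().getClassLoader());
                conf.readFields(new DataInputStream(new ByteArrayInputStream(data)));
                return conf;
            } catch (Exception e) {
                throw new HyracksDataException(e);
            }
        }
    }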
diff --git a/pregelix/pregelix-dist/pom.xml b/pregelix/pregelix-dist/pom.xml
index 846ff66..0839966 100644
--- a/pregelix/pregelix-dist/pom.xml
+++ b/pregelix/pregelix-dist/pom.xml
@@ -4,7 +4,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<artifactId>pregelix-dist</artifactId>
<name>pregelix-dist</name>
@@ -69,13 +69,13 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-core</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-example</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
</dependencies>
</project>
diff --git a/pregelix/pregelix-example/data/clique2/clique.txt b/pregelix/pregelix-example/data/clique2/clique.txt
new file mode 100755
index 0000000..68fbaea
--- /dev/null
+++ b/pregelix/pregelix-example/data/clique2/clique.txt
@@ -0,0 +1,6 @@
+1 2 3 4
+2 1 3
+3 1 2 4 5
+4 1 3
+5 3 6
+6 5
\ No newline at end of file
diff --git a/pregelix/pregelix-example/data/clique3/clique.txt b/pregelix/pregelix-example/data/clique3/clique.txt
new file mode 100755
index 0000000..ed0b7c1
--- /dev/null
+++ b/pregelix/pregelix-example/data/clique3/clique.txt
@@ -0,0 +1,20 @@
+0 1 19
+1 2 3 4 5 6 7 8 9
+2 1 3 4 5 6 7 8 9
+3 1 2 4 5 6 7 8 9
+4 1 2 3 5 6 7 8 9
+5 1 2 3 4 6 7 8 9
+6 1 2 3 4 5 7 8 9
+7 1 2 3 4 5 6 8 9
+8 1 2 3 4 5 6 7 9
+9 1 2 3 4 5 6 7 8 10
+10 9 11
+11 10 12 13 14 15 16 17 18 19
+12 11 13 14 15 16 17 18 19
+13 11 12 14 15 16 17 18 19
+14 11 12 13 15 16 17 18 19
+15 11 12 13 14 16 17 18 19
+16 11 12 13 14 15 17 18 19
+17 11 12 13 14 15 16 18 19
+18 11 12 13 14 15 16 17 19
+19 0 11 12 13 14 15 16 17 18
\ No newline at end of file
diff --git a/pregelix/pregelix-example/pom.xml b/pregelix/pregelix-example/pom.xml
index 37da84f..1d08b8c 100644
--- a/pregelix/pregelix-example/pom.xml
+++ b/pregelix/pregelix-example/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
<build>
@@ -69,7 +69,7 @@
<version>2.7.2</version>
<configuration>
<forkMode>pertest</forkMode>
- <argLine>-enableassertions -Xmx512m -XX:MaxPermSize=300m -Dfile.encoding=UTF-8
+ <argLine>-enableassertions -Xmx2047m -XX:MaxPermSize=300m -Dfile.encoding=UTF-8
-Djava.util.logging.config.file=src/test/resources/logging.properties</argLine>
<includes>
<include>**/*TestSuite.java</include>
@@ -105,7 +105,7 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-core</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ConnectedComponentsVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ConnectedComponentsVertex.java
index 74ae455..6adbf83 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ConnectedComponentsVertex.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ConnectedComponentsVertex.java
@@ -34,6 +34,7 @@
import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat.TextVertexWriter;
import edu.uci.ics.pregelix.api.job.PregelixJob;
import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer;
import edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat;
import edu.uci.ics.pregelix.example.io.VLongWritable;
@@ -137,6 +138,7 @@
job.setVertexInputFormatClass(TextPageRankInputFormat.class);
job.setVertexOutputFormatClass(SimpleConnectedComponentsVertexOutputFormat.class);
job.setMessageCombinerClass(ConnectedComponentsVertex.SimpleMinCombiner.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
Client.run(args, job);
}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/GraphMutationVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/GraphMutationVertex.java
index e54373f..08de520 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/GraphMutationVertex.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/GraphMutationVertex.java
@@ -30,6 +30,7 @@
import edu.uci.ics.pregelix.api.io.text.TextVertexOutputFormat.TextVertexWriter;
import edu.uci.ics.pregelix.api.job.PregelixJob;
import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer;
import edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat;
import edu.uci.ics.pregelix.example.io.VLongWritable;
@@ -40,6 +41,7 @@
private VLongWritable vid = new VLongWritable();
private GraphMutationVertex newVertex = null;
+ private DoubleWritable msg = new DoubleWritable(0.0);
@Override
public void compute(Iterator<DoubleWritable> msgIterator) {
@@ -47,17 +49,20 @@
if (newVertex == null) {
newVertex = new GraphMutationVertex();
}
- if (getVertexId().get() % 2 == 0 || getVertexId().get() % 3 == 0) {
- deleteVertex(getVertexId());
- } else {
- vid.set(100 * getVertexId().get());
- newVertex.setVertexId(vid);
- newVertex.setVertexValue(getVertexValue());
- addVertex(vid, newVertex);
+ if (getVertexId().get() < 100) {
+ if ((getVertexId().get() % 2 == 0 || getVertexId().get() % 3 == 0)) {
+ deleteVertex(getVertexId());
+ } else {
+ vid.set(100 * getVertexId().get());
+ newVertex.setVertexId(vid);
+ newVertex.setVertexValue(getVertexValue());
+ addVertex(vid, newVertex);
+ sendMsg(vid, msg);
+ }
}
voteToHalt();
} else {
- if (getVertexId().get() % 190 == 0) {
+ if (getVertexId().get() == 1900) {
deleteVertex(getVertexId());
}
voteToHalt();
@@ -105,6 +110,7 @@
job.setVertexClass(GraphMutationVertex.class);
job.setVertexInputFormatClass(TextPageRankInputFormat.class);
job.setVertexOutputFormatClass(SimpleGraphMutationVertexOutputFormat.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
Client.run(args, job);
}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/PageRankVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/PageRankVertex.java
index b6d4da7..752b23a 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/PageRankVertex.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/PageRankVertex.java
@@ -43,6 +43,7 @@
import edu.uci.ics.pregelix.api.job.PregelixJob;
import edu.uci.ics.pregelix.api.util.BspUtils;
import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer;
import edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat;
import edu.uci.ics.pregelix.example.io.VLongWritable;
@@ -216,6 +217,7 @@
job.setVertexInputFormatClass(TextPageRankInputFormat.class);
job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
job.setMessageCombinerClass(PageRankVertex.SimpleSumCombiner.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
Client.run(args, job);
}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ReachabilityVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ReachabilityVertex.java
index 0895386..8102c14 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ReachabilityVertex.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ReachabilityVertex.java
@@ -40,6 +40,7 @@
import edu.uci.ics.pregelix.api.util.BspUtils;
import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer;
import edu.uci.ics.pregelix.example.inputformat.TextReachibilityVertexInputFormat;
import edu.uci.ics.pregelix.example.io.VLongWritable;
@@ -223,6 +224,7 @@
job.setVertexInputFormatClass(TextReachibilityVertexInputFormat.class);
job.setVertexOutputFormatClass(SimpleReachibilityVertexOutputFormat.class);
job.setMessageCombinerClass(ReachabilityVertex.SimpleReachibilityCombiner.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
Client.run(args, job);
System.out.println("reachable? " + readReachibilityResult(job.getConfiguration()));
}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ShortestPathsVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ShortestPathsVertex.java
index 199870e..396acae 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ShortestPathsVertex.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/ShortestPathsVertex.java
@@ -30,6 +30,7 @@
import edu.uci.ics.pregelix.api.job.PregelixJob;
import edu.uci.ics.pregelix.example.PageRankVertex.SimplePageRankVertexOutputFormat;
import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer;
import edu.uci.ics.pregelix.example.inputformat.TextShortestPathsInputFormat;
import edu.uci.ics.pregelix.example.io.VLongWritable;
@@ -138,6 +139,7 @@
job.setVertexInputFormatClass(TextShortestPathsInputFormat.class);
job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
job.setMessageCombinerClass(ShortestPathsVertex.SimpleMinCombiner.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
job.getConfiguration().setLong(SOURCE_ID, 0);
Client.run(args, job);
}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/data/VLongNormalizedKeyComputer.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/data/VLongNormalizedKeyComputer.java
new file mode 100644
index 0000000..7d824ea
--- /dev/null
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/data/VLongNormalizedKeyComputer.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.example.data;
+
+import edu.uci.ics.pregelix.api.graph.NormalizedKeyComputer;
+import edu.uci.ics.pregelix.api.util.SerDeUtils;
+
+/**
+ * @author yingyib
+ */
+public class VLongNormalizedKeyComputer implements NormalizedKeyComputer {
+
+ private static final int POSTIVE_LONG_MASK = (3 << 30);
+ private static final int NON_NEGATIVE_INT_MASK = (2 << 30);
+ private static final int NEGATIVE_LONG_MASK = (0 << 30);
+
+ @Override
+ public int getNormalizedKey(byte[] bytes, int start, int length) {
+ long value = SerDeUtils.readVLong(bytes, start, length);
+ int highValue = (int) (value >> 32);
+ if (highValue > 0) {
+ /**
+ * larger than Integer.MAX
+ */
+ int highNmk = getKey(highValue);
+ highNmk >>= 2;
+ highNmk |= POSTIVE_LONG_MASK;
+ return highNmk;
+ } else if (highValue == 0) {
+ /**
+ * smaller than Integer.MAX but >=0
+ */
+ int lowNmk = (int) value;
+ lowNmk >>= 2;
+ lowNmk |= NON_NEGATIVE_INT_MASK;
+ return lowNmk;
+ } else {
+ /**
+ * less than 0; TODO: not yet optimized for this case
+ */
+ int highNmk = getKey(highValue);
+ highNmk >>= 2;
+ highNmk |= NEGATIVE_LONG_MASK;
+ return highNmk;
+ }
+ }
+
+ private int getKey(int value) {
+ long unsignedFirstValue = (long) value;
+ int nmk = (int) ((unsignedFirstValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
+ return nmk;
+ }
+
+}
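The three masks place negative vlongs below non-negative ints below values past the 32-bit range, so unsigned comparison of the normalized int agrees with signed comparison of the original value. The building block is getKey(int), which shifts a signed int by Integer.MIN_VALUE to turn signed order into unsigned order. A hypothetical standalone check of that property (it exercises only the shift, not the vlong decoding):

    package edu.uci.ics.pregelix.example.data;

    // Hypothetical sketch, not part of this patch: checks the ordering
    // property behind getKey(int), which maps a signed int onto the unsigned
    // range by subtracting Integer.MIN_VALUE.
    public class NormalizedKeyOrderSketch {

        private static int key(int value) {
            // same arithmetic as VLongNormalizedKeyComputer.getKey(int)
            return (int) (((long) value - (long) Integer.MIN_VALUE) & 0xffffffffL);
        }

        private static int compareUnsigned(int a, int b) {
            long ua = a & 0xffffffffL;
            long ub = b & 0xffffffffL;
            return ua < ub ? -1 : (ua > ub ? 1 : 0);
        }

        public static void main(String[] args) {
            int[] samples = { Integer.MIN_VALUE, -7, -1, 0, 1, 42, Integer.MAX_VALUE };
            for (int i = 0; i + 1 < samples.length; i++) {
                if (compareUnsigned(key(samples[i]), key(samples[i + 1])) >= 0) {
                    throw new AssertionError("order broken at index " + i);
                }
            }
            System.out.println("signed order preserved under unsigned comparison");
        }
    }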
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextPageRankInputFormat.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextPageRankInputFormat.java
index a8a752e..8d0b776 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextPageRankInputFormat.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/inputformat/TextPageRankInputFormat.java
@@ -39,7 +39,7 @@
@Override
public VertexReader<VLongWritable, DoubleWritable, FloatWritable, DoubleWritable> createVertexReader(
InputSplit split, TaskAttemptContext context) throws IOException {
- return new TextPageRankGraphReader(textInputFormat.createRecordReader(split, context));
+ return new TextPageRankGraphReader(textInputFormat.createRecordReader(split, context), context);
}
}
@@ -52,7 +52,7 @@
private List<VLongWritable> pool = new ArrayList<VLongWritable>();
private int used = 0;
- public TextPageRankGraphReader(RecordReader<LongWritable, Text> lineRecordReader) {
+ public TextPageRankGraphReader(RecordReader<LongWritable, Text> lineRecordReader, TaskAttemptContext context) {
super(lineRecordReader);
}
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/CliquesWritable.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/CliquesWritable.java
index 0e22ea1..d2c5e6f 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/CliquesWritable.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/CliquesWritable.java
@@ -32,6 +32,7 @@
private List<VLongWritable> cliques = new ArrayList<VLongWritable>();
private int sizeOfClique = 0;
+ private VLongWritable srcId = new VLongWritable(0);
public CliquesWritable(List<VLongWritable> cliques, int sizeOfClique) {
this.cliques = cliques;
@@ -43,6 +44,16 @@
}
/**
+ * Set the srcId
+ *
+ * @param srcId
+ * the source vertex Id
+ */
+ public void setSrcId(VLongWritable srcId) {
+ this.srcId = srcId;
+ }
+
+ /**
* Set the size of cliques.
*
* @param sizeOfClique
@@ -103,6 +114,11 @@
cliques.add(vid);
}
}
+
+ if (srcId == null) {
+ srcId = new VLongWritable();
+ }
+ srcId.readFields(input);
}
@Override
@@ -117,6 +133,8 @@
for (int i = 0; i < cliques.size(); i++) {
cliques.get(i).write(output);
}
+
+ srcId.write(output);
}
@Override
@@ -126,11 +144,14 @@
StringBuffer sb = new StringBuffer();
int numCliques = cliques.size() / sizeOfClique;
for (int i = 0; i < numCliques; i++) {
+ sb.append(srcId);
+ sb.append(",");
+ int start = i * sizeOfClique;
for (int j = 0; j < sizeOfClique - 1; j++) {
- sb.append(cliques.get(j));
+ sb.append(cliques.get(start + j));
sb.append(",");
}
- sb.append(cliques.get(sizeOfClique - 1));
+ sb.append(cliques.get(start + sizeOfClique - 1));
sb.append(";");
}
return sb.toString();
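The toString() change above fixes an indexing bug: cliques are stored flattened in one list, so clique i occupies positions [i * sizeOfClique, (i + 1) * sizeOfClique), and the old code re-read clique 0 for every i. A hypothetical standalone restatement of the corrected grouping:

    package edu.uci.ics.pregelix.example.maximalclique;

    // Hypothetical sketch, not part of this patch: restates the corrected
    // indexing, where clique i occupies the flattened slice
    // [i * sizeOfClique, (i + 1) * sizeOfClique).
    public class CliqueGroupingSketch {
        public static void main(String[] args) {
            long[] flat = { 1, 2, 3, 1, 2, 4 }; // two cliques of size 3
            int sizeOfClique = 3;
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < flat.length / sizeOfClique; i++) {
                int start = i * sizeOfClique; // the fix: offset by the clique index
                for (int j = 0; j < sizeOfClique; j++) {
                    sb.append(flat[start + j]).append(j == sizeOfClique - 1 ? ";" : ",");
                }
            }
            System.out.println(sb); // prints 1,2,3;1,2,4;
        }
    }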
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueVertex.java
index 266feb7..2ff800e 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueVertex.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/maximalclique/MaximalCliqueVertex.java
@@ -39,13 +39,14 @@
import edu.uci.ics.pregelix.api.util.BspUtils;
import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer;
import edu.uci.ics.pregelix.example.io.VLongWritable;
import edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingVertex;
/**
* The maximal clique example -- find maximal cliques in an undirected graph.
- * The result cliques contains vertexes ordered by the vertex id ascendingly. The algorithm takes
- * advantage of that property to do effective pruning.
+ * The result cliques contain vertexes ordered by vertex id in ascending
+ * order. The algorithm takes advantage of that property to do effective pruning.
*/
public class MaximalCliqueVertex extends Vertex<VLongWritable, CliquesWritable, NullWritable, AdjacencyListWritable> {
@@ -55,7 +56,7 @@
private int largestCliqueSizeSoFar = 0;
private List<BitSet> currentMaximalCliques = new ArrayList<BitSet>();
private CliquesWritable tmpValue = new CliquesWritable();
- private List<VLongWritable> cliques = new ArrayList<VLongWritable>();
+ private List<VLongWritable> cliqueList = new ArrayList<VLongWritable>();
/**
* Update the current maximal cliques
@@ -68,7 +69,6 @@
vertexList.clear();
invertedMap.clear();
currentMaximalCliques.clear();
- cliques.clear();
tmpValue.reset();
// build the initial sub graph
@@ -76,8 +76,6 @@
AdjacencyListWritable adj = values.next();
map.put(adj.getSource(), adj);
}
- VLongWritable srcId = getVertexId();
- map.put(srcId, new AdjacencyListWritable());
// build the vertex list (vertex id in ascending order) and the inverted list of vertexes
int i = 0;
@@ -86,12 +84,14 @@
invertedMap.put(v, i++);
}
- //clean up adjacency list --- remove vertexes who are not neighbors of key
+ // clean up adjacency list --- remove vertexes that are not neighbors
+ // of the key
for (AdjacencyListWritable adj : map.values()) {
adj.cleanNonMatch(vertexList);
}
- // get the h-index of the subgraph --- which is the maximum depth to explore
+ // get the h-index of the subgraph --- which is the maximum depth to
+ // explore
int[] neighborCounts = new int[map.size()];
i = 0;
for (AdjacencyListWritable adj : map.values()) {
@@ -105,11 +105,13 @@
}
h++;
}
- if (h < largestCliqueSizeSoFar) {
+
+ // the clique size is upper-bounded by h+1
+ if (h + 1 < largestCliqueSizeSoFar) {
return;
}
- //start depth-first search
+ // start depth-first search
BitSet cliqueSoFar = new BitSet(h);
for (VLongWritable v : vertexList) {
cliqueSoFar.set(invertedMap.get(v));
@@ -117,16 +119,15 @@
cliqueSoFar.clear();
}
- //output local maximal cliques
+ // output local maximal cliques
+ tmpValue.setSrcId(getVertexId());
for (BitSet clique : currentMaximalCliques) {
- int keyIndex = invertedMap.get(srcId);
- clique.set(keyIndex);
generateClique(clique);
- tmpValue.addCliques(cliques);
+ tmpValue.addCliques(cliqueList);
tmpValue.setCliqueSize(clique.cardinality());
}
- //update the vertex state
+ // update the vertex state
setVertexValue(tmpValue);
}
@@ -136,13 +137,15 @@
* @param clique
* the bitmap representation of a clique
*/
- private void generateClique(BitSet clique) {
+ private List<VLongWritable> generateClique(BitSet clique) {
+ cliqueList.clear();
for (int j = 0; j < clique.length();) {
j = clique.nextSetBit(j);
VLongWritable v = vertexList.get(j);
- cliques.add(v);
+ cliqueList.add(v);
j++;
}
+ return cliqueList;
}
/**
@@ -170,7 +173,7 @@
while (neighbors.hasNext()) {
VLongWritable neighbor = neighbors.next();
if (!isTested(neighbor, cliqueSoFar) && isClique(neighbor, cliqueSoFar)) {
- //snapshot the clique
+ // snapshot the clique
int cliqueLength = cliqueSoFar.length();
// expand the clique
cliqueSoFar.set(invertedMap.get(neighbor));
@@ -217,7 +220,8 @@
int largestSetIndex = cliqueSoFar.length() - 1;
if (index > largestSetIndex) {
// we only return cliques with vertexes in the ascending order
- // hence, the new vertex must be larger than the largesetSetIndex in the clique
+ // hence, the new vertex must be larger than the largestSetIndex in
+ // the clique
return false;
} else {
// otherwise, we think the vertex is "tested"
@@ -236,7 +240,8 @@
*/
private boolean isClique(VLongWritable newVertex, BitSet cliqueSoFar) {
AdjacencyListWritable adj = map.get(newVertex);
- // check whether each existing vertex is in the neighbor set of newVertex
+ // check whether each existing vertex is in the neighbor set of
+ // newVertex
for (int i = 0; i < cliqueSoFar.length();) {
i = cliqueSoFar.nextSetBit(i);
VLongWritable v = vertexList.get(i);
@@ -249,9 +254,8 @@
}
/**
- * For superstep 1, send outgoing mesages.
- * For superstep 2, calculate maximal cliques.
- * otherwise, vote to halt.
+ * For superstep 1, send outgoing messages. For superstep 2, calculate
+ * maximal cliques. Otherwise, vote to halt.
*/
@Override
public void compute(Iterator<AdjacencyListWritable> msgIterator) {
@@ -287,6 +291,7 @@
job.setDynamicVertexValueSize(true);
job.setVertexInputFormatClass(TextMaximalCliqueInputFormat.class);
job.setVertexOutputFormatClass(MaximalCliqueVertexOutputFormat.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
Client.run(args, job);
System.out.println("maximal cliques: \n" + readMaximalCliqueResult(job.getConfiguration()));
}
@@ -300,9 +305,11 @@
private void sendOutgoingMsgs(List<Edge<VLongWritable, NullWritable>> edges) {
for (int i = 0; i < edges.size(); i++) {
if (edges.get(i).getDestVertexId().get() < getVertexId().get()) {
- // only add emit for the vertexes whose id is smaller than the vertex id
+ // only emit messages for the vertexes whose id is smaller than the
+ // vertex id
// to avoid the duplicate removal step,
- // because all the resulting cliques will have vertexes in the ascending order.
+ // because all the resulting cliques will have vertexes in the
+ // ascending order.
AdjacencyListWritable msg = new AdjacencyListWritable();
msg.setSource(getVertexId());
for (int j = i + 1; j < edges.size(); j++) {
diff --git a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingVertex.java b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingVertex.java
index d3db095..89bfee8 100644
--- a/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingVertex.java
+++ b/pregelix/pregelix-example/src/main/java/edu/uci/ics/pregelix/example/trianglecounting/TriangleCountingVertex.java
@@ -20,6 +20,7 @@
import edu.uci.ics.pregelix.api.util.BspUtils;
import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
import edu.uci.ics.pregelix.example.client.Client;
+import edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer;
import edu.uci.ics.pregelix.example.io.VLongWritable;
/**
@@ -134,6 +135,7 @@
job.setGlobalAggregatorClass(TriangleCountingAggregator.class);
job.setVertexInputFormatClass(TextTriangleCountingInputFormat.class);
job.setVertexOutputFormatClass(TriangleCountingVertexOutputFormat.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
Client.run(args, job);
System.out.println("triangle count: " + readTriangleCountingResult(job.getConfiguration()));
}
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobgen/JobGenerator.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobgen/JobGenerator.java
index ca5a1c4..8fafede 100644
--- a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobgen/JobGenerator.java
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobgen/JobGenerator.java
@@ -33,6 +33,7 @@
import edu.uci.ics.pregelix.example.ReachabilityVertex;
import edu.uci.ics.pregelix.example.ReachabilityVertex.SimpleReachibilityVertexOutputFormat;
import edu.uci.ics.pregelix.example.ShortestPathsVertex;
+import edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer;
import edu.uci.ics.pregelix.example.inputformat.TextConnectedComponentsInputFormat;
import edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat;
import edu.uci.ics.pregelix.example.inputformat.TextReachibilityVertexInputFormat;
@@ -55,6 +56,8 @@
private static String HDFS_OUTPUTPAH2 = "/resultcomplex";
private static String HDFS_INPUTPATH3 = "/clique";
+ private static String HDFS_INPUTPATH4 = "/clique2";
+ private static String HDFS_INPUTPATH5 = "/clique3";
private static String HDFS_OUTPUTPAH3 = "/resultclique";
private static void generatePageRankJobReal(String jobName, String outputPath) throws IOException {
@@ -63,6 +66,7 @@
job.setVertexInputFormatClass(TextPageRankInputFormat.class);
job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
job.setMessageCombinerClass(PageRankVertex.SimpleSumCombiner.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
@@ -75,6 +79,7 @@
job.setVertexInputFormatClass(TextPageRankInputFormat.class);
job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
job.setMessageCombinerClass(PageRankVertex.SimpleSumCombiner.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH2);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH2));
job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 23);
@@ -87,6 +92,7 @@
job.setVertexInputFormatClass(TextShortestPathsInputFormat.class);
job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
job.setMessageCombinerClass(ShortestPathsVertex.SimpleMinCombiner.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
@@ -99,6 +105,7 @@
job.setVertexClass(PageRankVertex.class);
job.setVertexInputFormatClass(TextPageRankInputFormat.class);
job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
@@ -111,6 +118,7 @@
job.setVertexInputFormatClass(TextConnectedComponentsInputFormat.class);
job.setVertexOutputFormatClass(SimpleConnectedComponentsVertexOutputFormat.class);
job.setMessageCombinerClass(ConnectedComponentsVertex.SimpleMinCombiner.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
@@ -123,6 +131,7 @@
job.setVertexInputFormatClass(TextConnectedComponentsInputFormat.class);
job.setVertexOutputFormatClass(SimpleConnectedComponentsVertexOutputFormat.class);
job.setMessageCombinerClass(ConnectedComponentsVertex.SimpleMinCombiner.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH2);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH2));
job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 23);
@@ -135,6 +144,7 @@
job.setVertexInputFormatClass(TextReachibilityVertexInputFormat.class);
job.setVertexOutputFormatClass(SimpleReachibilityVertexOutputFormat.class);
job.setMessageCombinerClass(ReachabilityVertex.SimpleReachibilityCombiner.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH2);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH2));
job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 23);
@@ -150,6 +160,7 @@
job.setVertexInputFormatClass(TextReachibilityVertexInputFormat.class);
job.setVertexOutputFormatClass(SimpleReachibilityVertexOutputFormat.class);
job.setMessageCombinerClass(ReachabilityVertex.SimpleReachibilityCombiner.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH2);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH2));
job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 23);
@@ -164,6 +175,7 @@
job.setVertexInputFormatClass(SimulatedPageRankVertexInputFormat.class);
job.setMessageCombinerClass(PageRankVertex.SimpleSumCombiner.class);
job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
@@ -176,6 +188,7 @@
job.setVertexInputFormatClass(SimulatedPageRankVertexInputFormat.class);
job.setMessageCombinerClass(ShortestPathsVertex.SimpleMinCombiner.class);
job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
@@ -189,6 +202,7 @@
job.setVertexInputFormatClass(TextPageRankInputFormat.class);
job.setVertexOutputFormatClass(SimplePageRankVertexOutputFormat.class);
job.setMessageCombinerClass(PageRankVertex.SimpleSumCombiner.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
job.setDynamicVertexValueSize(true);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
@@ -202,6 +216,7 @@
job.setGlobalAggregatorClass(TriangleCountingAggregator.class);
job.setVertexInputFormatClass(TextTriangleCountingInputFormat.class);
job.setVertexOutputFormatClass(TriangleCountingVertexOutputFormat.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH3);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH3));
job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
@@ -214,16 +229,44 @@
job.setDynamicVertexValueSize(true);
job.setVertexInputFormatClass(TextMaximalCliqueInputFormat.class);
job.setVertexOutputFormatClass(MaximalCliqueVertexOutputFormat.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH3);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH3));
job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
}
+
+ private static void generateMaximalCliqueJob2(String jobName, String outputPath) throws IOException {
+ PregelixJob job = new PregelixJob(jobName);
+ job.setVertexClass(MaximalCliqueVertex.class);
+ job.setGlobalAggregatorClass(MaximalCliqueAggregator.class);
+ job.setDynamicVertexValueSize(true);
+ job.setVertexInputFormatClass(TextMaximalCliqueInputFormat.class);
+ job.setVertexOutputFormatClass(MaximalCliqueVertexOutputFormat.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
+ FileInputFormat.setInputPaths(job, HDFS_INPUTPATH4);
+ FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH3));
+ job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+ }
+
+ private static void generateMaximalCliqueJob3(String jobName, String outputPath) throws IOException {
+ PregelixJob job = new PregelixJob(jobName);
+ job.setVertexClass(MaximalCliqueVertex.class);
+ job.setGlobalAggregatorClass(MaximalCliqueAggregator.class);
+ job.setDynamicVertexValueSize(true);
+ job.setVertexInputFormatClass(TextMaximalCliqueInputFormat.class);
+ job.setVertexOutputFormatClass(MaximalCliqueVertexOutputFormat.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
+ FileInputFormat.setInputPaths(job, HDFS_INPUTPATH5);
+ FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH3));
+ job.getConfiguration().writeXml(new FileOutputStream(new File(outputPath)));
+ }
private static void generateGraphMutationJob(String jobName, String outputPath) throws IOException {
PregelixJob job = new PregelixJob(jobName);
job.setVertexClass(GraphMutationVertex.class);
job.setVertexInputFormatClass(TextPageRankInputFormat.class);
job.setVertexOutputFormatClass(SimpleGraphMutationVertexOutputFormat.class);
+ job.setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class);
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH));
job.getConfiguration().setLong(PregelixJob.NUM_VERTICE, 20);
@@ -261,6 +304,8 @@
private static void genMaximalClique() throws IOException {
generateMaximalCliqueJob("Maximal Clique", outputBase + "MaximalClique.xml");
+ generateMaximalCliqueJob2("Maximal Clique 2", outputBase + "MaximalClique2.xml");
+ generateMaximalCliqueJob3("Maximal Clique 3", outputBase + "MaximalClique3.xml");
}
private static void genGraphMutation() throws IOException {
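
Every generated job above now registers VLongNormalizedKeyComputer. The point of a normalized-key computer is to let the runtime sort compare a fixed-size integer prefix of each key instead of deserializing full variable-length VLong keys on every comparison; full keys are consulted only when two prefixes tie. A minimal, self-contained sketch of the idea (the names here are hypothetical; the real VLongNormalizedKeyComputer implements Hyracks' INormalizedKeyComputer over serialized VLong bytes rather than boxed longs):

```java
// Hypothetical sketch of the normalized-key idea behind
// setNoramlizedKeyComputerClass(VLongNormalizedKeyComputer.class).
public class NormalizedKeySketch {

    /** Map a signed long to an int prefix whose *unsigned* order agrees with the long order. */
    static int normalize(long key) {
        // Flip the sign bit so signed long order becomes unsigned order,
        // then keep the high 32 bits as the cheap comparison prefix.
        return (int) ((key ^ Long.MIN_VALUE) >>> 32);
    }

    /** How a sorter uses it: compare cheap prefixes first, full keys only on a tie. */
    static int compare(long a, long b) {
        int c = Integer.compareUnsigned(normalize(a), normalize(b));
        return c != 0 ? c : Long.compare(a, b);
    }

    public static void main(String[] args) {
        System.out.println(compare(-1L, 1L) < 0);            // true: sign handled correctly
        System.out.println(compare(1L << 40, 1L << 41) < 0); // true: high bits preserved
    }
}
```
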
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestCase.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestCase.java
index 5a556fa..7126e8c 100644
--- a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestCase.java
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestCase.java
@@ -19,26 +19,20 @@
import junit.framework.TestCase;
+import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.junit.Test;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.pregelix.api.job.PregelixJob;
-import edu.uci.ics.pregelix.core.jobgen.JobGen;
-import edu.uci.ics.pregelix.core.jobgen.JobGenInnerJoin;
-import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoin;
-import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoinSingleSort;
-import edu.uci.ics.pregelix.core.jobgen.JobGenOuterJoinSort;
+import edu.uci.ics.pregelix.core.base.IDriver.Plan;
+import edu.uci.ics.pregelix.core.driver.Driver;
import edu.uci.ics.pregelix.core.util.PregelixHyracksIntegrationUtil;
-import edu.uci.ics.pregelix.dataflow.util.IterationUtils;
import edu.uci.ics.pregelix.example.util.TestUtils;
public class RunJobTestCase extends TestCase {
- private static final String NC1 = "nc1";
- private static final String HYRACKS_APP_NAME = "pregelix";
private static String HDFS_INPUTPATH = "/webmap";
private static String HDFS_OUTPUTPAH = "/result";
@@ -48,14 +42,21 @@
private static String HDFS_INPUTPATH3 = "/clique";
private static String HDFS_OUTPUTPAH3 = "/resultclique";
- private final PregelixJob job;
- private JobGen[] giraphJobGens;
- private final String resultFileName;
- private final String expectedFileName;
- private final String jobFile;
+ private static String HDFS_INPUTPATH4 = "/clique2";
+ private static String HDFS_OUTPUTPAH4 = "/resultclique";
- public RunJobTestCase(String hadoopConfPath, String jobName, String jobFile, String resultFile, String expectedFile)
- throws Exception {
+ private static String HDFS_INPUTPATH5 = "/clique3";
+ private static String HDFS_OUTPUTPAH5 = "/resultclique";
+
+ private final PregelixJob job;
+ private final String resultFileDir;
+ private final String expectedFileDir;
+ private final String jobFile;
+ private final Driver driver = new Driver(this.getClass());
+ private final FileSystem dfs;
+
+ public RunJobTestCase(String hadoopConfPath, String jobName, String jobFile, String resultFile,
+ String expectedFile, FileSystem dfs) throws Exception {
super("test");
this.jobFile = jobFile;
this.job = new PregelixJob("test");
@@ -68,21 +69,20 @@
} else if (inputPaths[0].toString().endsWith(HDFS_INPUTPATH2)) {
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH2);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH2));
- } else {
+ } else if (inputPaths[0].toString().endsWith(HDFS_INPUTPATH3)) {
FileInputFormat.setInputPaths(job, HDFS_INPUTPATH3);
FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH3));
+ } else if (inputPaths[0].toString().endsWith(HDFS_INPUTPATH4)) {
+ FileInputFormat.setInputPaths(job, HDFS_INPUTPATH4);
+ FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH4));
+ } else if (inputPaths[0].toString().endsWith(HDFS_INPUTPATH5)) {
+ FileInputFormat.setInputPaths(job, HDFS_INPUTPATH5);
+ FileOutputFormat.setOutputPath(job, new Path(HDFS_OUTPUTPAH5));
}
job.setJobName(jobName);
- this.resultFileName = resultFile;
- this.expectedFileName = expectedFile;
- giraphJobGens = new JobGen[4];
- giraphJobGens[0] = new JobGenOuterJoin(job);
- waitawhile();
- giraphJobGens[1] = new JobGenInnerJoin(job);
- waitawhile();
- giraphJobGens[2] = new JobGenOuterJoinSort(job);
- waitawhile();
- giraphJobGens[3] = new JobGenOuterJoinSingleSort(job);
+ this.resultFileDir = resultFile;
+ this.expectedFileDir = expectedFile;
+ this.dfs = dfs;
}
private void waitawhile() throws InterruptedException {
@@ -94,89 +94,20 @@
@Test
public void test() throws Exception {
setUp();
- for (JobGen jobGen : giraphJobGens) {
- FileSystem dfs = FileSystem.get(job.getConfiguration());
- dfs.delete(new Path(HDFS_OUTPUTPAH), true);
- runCreate(jobGen);
- runDataLoad(jobGen);
- int i = 1;
- boolean terminate = false;
- do {
- runLoopBodyIteration(jobGen, i);
- terminate = IterationUtils.readTerminationState(job.getConfiguration(), jobGen.getJobId());
- i++;
- } while (!terminate);
- runIndexScan(jobGen);
- runHDFSWRite(jobGen);
- runCleanup(jobGen);
+ Plan[] plans = new Plan[] { Plan.OUTER_JOIN_SORT, Plan.OUTER_JOIN, Plan.INNER_JOIN, Plan.OUTER_JOIN_SINGLE_SORT };
+ for (Plan plan : plans) {
+ driver.runJob(job, plan, PregelixHyracksIntegrationUtil.CC_HOST,
+ PregelixHyracksIntegrationUtil.TEST_HYRACKS_CC_CLIENT_PORT, false);
compareResults();
}
tearDown();
waitawhile();
}
- private void runCreate(JobGen jobGen) throws Exception {
- try {
- JobSpecification treeCreateJobSpec = jobGen.generateCreatingJob();
- PregelixHyracksIntegrationUtil.runJob(treeCreateJobSpec, HYRACKS_APP_NAME);
- } catch (Exception e) {
- throw e;
- }
- }
-
- private void runDataLoad(JobGen jobGen) throws Exception {
- try {
- JobSpecification bulkLoadJobSpec = jobGen.generateLoadingJob();
- PregelixHyracksIntegrationUtil.runJob(bulkLoadJobSpec, HYRACKS_APP_NAME);
- } catch (Exception e) {
- throw e;
- }
- }
-
- private void runLoopBodyIteration(JobGen jobGen, int iteration) throws Exception {
- try {
- JobSpecification loopBody = jobGen.generateJob(iteration);
- PregelixHyracksIntegrationUtil.runJob(loopBody, HYRACKS_APP_NAME);
- } catch (Exception e) {
- throw e;
- }
- }
-
- private void runIndexScan(JobGen jobGen) throws Exception {
- try {
- JobSpecification scanSortPrintJobSpec = jobGen.scanIndexPrintGraph(NC1, resultFileName);
- PregelixHyracksIntegrationUtil.runJob(scanSortPrintJobSpec, HYRACKS_APP_NAME);
- } catch (Exception e) {
- throw e;
- }
- }
-
- private void runHDFSWRite(JobGen jobGen) throws Exception {
- try {
- JobSpecification scanSortPrintJobSpec = jobGen.scanIndexWriteGraph();
- PregelixHyracksIntegrationUtil.runJob(scanSortPrintJobSpec, HYRACKS_APP_NAME);
- } catch (Exception e) {
- throw e;
- }
- }
-
- private void runCleanup(JobGen jobGen) throws Exception {
- try {
- JobSpecification[] cleanups = jobGen.generateCleanup();
- runJobArray(cleanups);
- } catch (Exception e) {
- throw e;
- }
- }
-
- private void runJobArray(JobSpecification[] jobs) throws Exception {
- for (JobSpecification job : jobs) {
- PregelixHyracksIntegrationUtil.runJob(job, HYRACKS_APP_NAME);
- }
- }
-
private void compareResults() throws Exception {
- TestUtils.compareWithResult(new File(resultFileName), new File(expectedFileName));
+ FileUtils.deleteQuietly(new File(resultFileDir));
+ dfs.copyToLocalFile(FileOutputFormat.getOutputPath(job), new Path(resultFileDir));
+ TestUtils.compareWithResultDir(new File(expectedFileDir), new File(resultFileDir));
}
public String toString() {
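
The rewritten constructor above picks the HDFS output path by chaining endsWith checks over the known input suffixes, and the test body now delegates the whole create/load/iterate/write pipeline to Driver.runJob once per plan instead of driving four JobGen variants by hand. As an illustrative alternative only (not the committed code), the suffix dispatch can also be written as a lookup table; the pairs below are the ones visible in this diff, with the complex-webmap pair elided because its input constant does not appear in these hunks:

```java
// Illustrative alternative to the endsWith chain in RunJobTestCase's
// constructor; insertion order mirrors the if/else chain above.
import java.util.LinkedHashMap;
import java.util.Map;

public class OutputPathResolver {
    private static final Map<String, String> IN_TO_OUT = new LinkedHashMap<>();
    static {
        IN_TO_OUT.put("/webmap", "/result");
        IN_TO_OUT.put("/clique", "/resultclique");
        IN_TO_OUT.put("/clique2", "/resultclique");
        IN_TO_OUT.put("/clique3", "/resultclique");
    }

    static String resolve(String inputPath) {
        for (Map.Entry<String, String> e : IN_TO_OUT.entrySet()) {
            if (inputPath.endsWith(e.getKey())) {
                return e.getValue();
            }
        }
        throw new IllegalArgumentException("unmapped input path: " + inputPath);
    }

    public static void main(String[] args) {
        System.out.println(resolve("/clique2")); // /resultclique
        System.out.println(resolve("/webmap"));  // /result
    }
}
```
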
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestSuite.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestSuite.java
index 4bf83e6..0a444fa 100644
--- a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestSuite.java
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/jobrun/RunJobTestSuite.java
@@ -51,7 +51,6 @@
private static final String PATH_TO_JOBS = "src/test/resources/jobs/";
private static final String PATH_TO_IGNORE = "src/test/resources/ignore.txt";
private static final String PATH_TO_ONLY = "src/test/resources/only.txt";
- private static final String FILE_EXTENSION_OF_RESULTS = "result";
private static final String DATA_PATH = "data/webmap/webmap_link.txt";
private static final String HDFS_PATH = "/webmap/";
@@ -62,7 +61,12 @@
private static final String DATA_PATH3 = "data/clique/clique.txt";
private static final String HDFS_PATH3 = "/clique/";
- private static final String HYRACKS_APP_NAME = "pregelix";
+ private static final String DATA_PATH4 = "data/clique2/clique.txt";
+ private static final String HDFS_PATH4 = "/clique2/";
+
+ private static final String DATA_PATH5 = "data/clique3/clique.txt";
+ private static final String HDFS_PATH5 = "/clique3/";
+
private static final String HADOOP_CONF_PATH = ACTUAL_RESULT_DIR + File.separator + "conf.xml";
private MiniDFSCluster dfsCluster;
@@ -111,6 +115,16 @@
dfs.mkdirs(dest);
dfs.copyFromLocalFile(src, dest);
+ src = new Path(DATA_PATH4);
+ dest = new Path(HDFS_PATH4);
+ dfs.mkdirs(dest);
+ dfs.copyFromLocalFile(src, dest);
+
+ src = new Path(DATA_PATH5);
+ dest = new Path(HDFS_PATH5);
+ dfs.mkdirs(dest);
+ dfs.copyFromLocalFile(src, dest);
+
DataOutputStream confOutput = new DataOutputStream(new FileOutputStream(new File(HADOOP_CONF_PATH)));
conf.writeXml(confOutput);
confOutput.flush();
@@ -138,6 +152,7 @@
RunJobTestSuite testSuite = new RunJobTestSuite();
testSuite.setUp();
boolean onlyEnabled = false;
+ FileSystem dfs = FileSystem.get(testSuite.conf);
if (onlys.size() > 0) {
onlyEnabled = true;
@@ -153,7 +168,7 @@
String resultFileName = ACTUAL_RESULT_DIR + File.separator + jobExtToResExt(qFile.getName());
String expectedFileName = EXPECTED_RESULT_DIR + File.separator + jobExtToResExt(qFile.getName());
testSuite.addTest(new RunJobTestCase(HADOOP_CONF_PATH, qFile.getName(), qFile.getAbsolutePath()
- .toString(), resultFileName, expectedFileName));
+ .toString(), resultFileName, expectedFileName, dfs));
}
}
}
@@ -193,7 +208,7 @@
private static String jobExtToResExt(String fname) {
int dot = fname.lastIndexOf('.');
- return fname.substring(0, dot + 1) + FILE_EXTENSION_OF_RESULTS;
+ return fname.substring(0, dot);
}
private static boolean isInList(List<String> onlys, String name) {
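
With FILE_EXTENSION_OF_RESULTS removed, jobExtToResExt now maps a job file to a bare name, so each test's expected and actual results are directories of per-partition part files rather than single flat .result files. A worked example of the changed mapping:

```java
// Worked example of the renamed result mapping after this change.
public class ResExtDemo {
    static String jobExtToResExt(String fname) {
        int dot = fname.lastIndexOf('.');
        return fname.substring(0, dot);
    }

    public static void main(String[] args) {
        // Old behavior: "PageRank.xml" -> "PageRank.result" (one flat file).
        // New behavior: "PageRank.xml" -> "PageRank" (a directory of part-N files).
        System.out.println(jobExtToResExt("PageRank.xml"));       // PageRank
        System.out.println(jobExtToResExt("MaximalClique2.xml")); // MaximalClique2
    }
}
```
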
diff --git a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/util/TestUtils.java b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/util/TestUtils.java
index d89ec46..d406125 100644
--- a/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/util/TestUtils.java
+++ b/pregelix/pregelix-example/src/test/java/edu/uci/ics/pregelix/example/util/TestUtils.java
@@ -20,6 +20,13 @@
public class TestUtils {
+ public static void compareWithResultDir(File expectedFileDir, File actualFileDir) throws Exception {
+ String[] fileNames = expectedFileDir.list();
+ for (String fileName : fileNames) {
+ compareWithResult(new File(expectedFileDir, fileName), new File(actualFileDir, fileName));
+ }
+ }
+
public static void compareWithResult(File expectedFile, File actualFile) throws Exception {
BufferedReader readerExpected = new BufferedReader(new FileReader(expectedFile));
BufferedReader readerActual = new BufferedReader(new FileReader(actualFile));
@@ -28,7 +35,6 @@
try {
while ((lineExpected = readerExpected.readLine()) != null) {
lineActual = readerActual.readLine();
- // Assert.assertEquals(lineExpected, lineActual);
if (lineActual == null) {
throw new Exception("Actual result changed at line " + num + ":\n< " + lineExpected + "\n> ");
}
@@ -62,8 +68,10 @@
if (row1.equals(row2))
continue;
- String[] fields1 = row1.split(" ");
- String[] fields2 = row2.split(" ");
+ boolean spaceOrTab = false;
+ spaceOrTab = row1.contains(" ");
+ String[] fields1 = spaceOrTab ? row1.split(" ") : row1.split("\t");
+ String[] fields2 = spaceOrTab ? row2.split(" ") : row2.split("\t");
for (int j = 0; j < fields1.length; j++) {
if (fields1[j].equals(fields2[j])) {
@@ -76,7 +84,7 @@
float float1 = (float) double1.doubleValue();
float float2 = (float) double2.doubleValue();
- if (Math.abs(float1 - float2) == 0)
+ if (Math.abs(float1 - float2) < 1.0e-7)
continue;
else {
return false;
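
Taken together, the TestUtils changes above switch comparison to whole result directories, accept either space- or tab-separated fields, and replace exact float equality with a 1.0e-7 tolerance, since different join plans and partitionings can perturb the low bits of accumulated sums. A self-contained sketch consolidating the per-row logic:

```java
// Hedged sketch of the per-row comparison TestUtils now performs: split on
// space or tab, compare fields exactly, and fall back to an epsilon check
// when a field is numeric (the committed code parses doubles, casts to
// float, and compares against 1.0e-7).
public class ResultRowCompareSketch {
    static boolean rowsEquivalent(String row1, String row2) {
        if (row1.equals(row2)) return true;
        String sep = row1.contains(" ") ? " " : "\t";
        String[] f1 = row1.split(sep), f2 = row2.split(sep);
        if (f1.length != f2.length) return false;
        for (int j = 0; j < f1.length; j++) {
            if (f1[j].equals(f2[j])) continue;
            try {
                float a = Float.parseFloat(f1[j]);
                float b = Float.parseFloat(f2[j]);
                if (Math.abs(a - b) >= 1.0e-7f) return false;
            } catch (NumberFormatException e) {
                return false; // mismatching non-numeric field
            }
        }
        return true;
    }

    public static void main(String[] args) {
        System.out.println(rowsEquivalent("1\t0.15351528192471647",
                                          "1\t0.15351528192471653")); // true
        System.out.println(rowsEquivalent("1 0", "1 1"));             // false
    }
}
```
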
diff --git a/pregelix/pregelix-example/src/test/resources/cluster/stores.properties b/pregelix/pregelix-example/src/test/resources/cluster/stores.properties
index daf881e..04732be 100644
--- a/pregelix/pregelix-example/src/test/resources/cluster/stores.properties
+++ b/pregelix/pregelix-example/src/test/resources/cluster/stores.properties
@@ -1 +1 @@
-store=teststore
\ No newline at end of file
+store=teststore1,teststore2
\ No newline at end of file
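
Splitting the store property into teststore1,teststore2 gives the test node controllers two local storage roots instead of one. A hypothetical reader for this file (the property name and values come from the diff above; the loading helper itself is illustrative):

```java
// Hypothetical loader for stores.properties: "store" is a comma-separated
// list, and each entry becomes one local storage root.
import java.io.FileReader;
import java.util.Properties;

public class StoresConfig {
    static String[] loadStores(String path) throws Exception {
        Properties props = new Properties();
        try (FileReader r = new FileReader(path)) {
            props.load(r);
        }
        return props.getProperty("store", "").split(",");
    }

    public static void main(String[] args) throws Exception {
        for (String s : loadStores("src/test/resources/cluster/stores.properties")) {
            System.out.println(s.trim()); // teststore1, teststore2
        }
    }
}
```
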
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result
deleted file mode 100644
index 45376e2..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result
+++ /dev/null
@@ -1,20 +0,0 @@
-0 0
-1 0
-2 0
-3 0
-4 0
-5 0
-6 0
-7 0
-8 0
-9 0
-10 0
-11 0
-12 0
-13 0
-14 0
-15 0
-16 0
-17 0
-18 0
-19 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal/part-0 b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal/part-0
new file mode 100755
index 0000000..f1f1d9b
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal/part-0
@@ -0,0 +1,5 @@
+0 0
+4 0
+8 0
+12 0
+16 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal/part-1 b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal/part-1
new file mode 100755
index 0000000..0fa02c1
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal/part-1
@@ -0,0 +1,5 @@
+1 0
+5 0
+9 0
+13 0
+17 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal/part-2 b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal/part-2
new file mode 100755
index 0000000..542ccae
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal/part-2
@@ -0,0 +1,5 @@
+2 0
+6 0
+10 0
+14 0
+18 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal/part-3 b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal/part-3
new file mode 100755
index 0000000..1d5d6d9
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal/part-3
@@ -0,0 +1,5 @@
+3 0
+7 0
+11 0
+15 0
+19 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result
deleted file mode 100644
index dbc30fc..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result
+++ /dev/null
@@ -1,23 +0,0 @@
-0 0
-1 0
-2 0
-3 0
-4 0
-5 0
-6 0
-7 0
-8 0
-9 0
-10 0
-11 0
-12 0
-13 0
-14 0
-15 0
-16 0
-17 0
-18 0
-19 0
-21 21
-25 25
-27 27
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex/part-0 b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex/part-0
new file mode 100755
index 0000000..f1f1d9b
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex/part-0
@@ -0,0 +1,5 @@
+0 0
+4 0
+8 0
+12 0
+16 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex/part-1 b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex/part-1
new file mode 100755
index 0000000..4e7d87a
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex/part-1
@@ -0,0 +1,7 @@
+1 0
+5 0
+9 0
+13 0
+17 0
+21 21
+25 25
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex/part-2 b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex/part-2
new file mode 100755
index 0000000..542ccae
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex/part-2
@@ -0,0 +1,5 @@
+2 0
+6 0
+10 0
+14 0
+18 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex/part-3 b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex/part-3
new file mode 100755
index 0000000..513f3ff
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex/part-3
@@ -0,0 +1,6 @@
+3 0
+7 0
+11 0
+15 0
+19 0
+27 27
diff --git a/pregelix/pregelix-example/src/test/resources/expected/GraphMutation.result b/pregelix/pregelix-example/src/test/resources/expected/GraphMutation.result
deleted file mode 100644
index a30166c..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/GraphMutation.result
+++ /dev/null
@@ -1,13 +0,0 @@
-1 0.0
-5 0.0
-7 0.0
-11 0.0
-13 0.0
-17 0.0
-19 0.0
-100 0.0
-500 0.0
-700 0.0
-1100 0.0
-1300 0.0
-1700 0.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/GraphMutation/part-0 b/pregelix/pregelix-example/src/test/resources/expected/GraphMutation/part-0
new file mode 100755
index 0000000..b5f7ed3
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/GraphMutation/part-0
@@ -0,0 +1,6 @@
+100 0.0
+500 0.0
+700 0.0
+1100 0.0
+1300 0.0
+1700 0.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/GraphMutation/part-1 b/pregelix/pregelix-example/src/test/resources/expected/GraphMutation/part-1
new file mode 100755
index 0000000..4eca51d
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/GraphMutation/part-1
@@ -0,0 +1,4 @@
+1 0.0
+5 0.0
+13 0.0
+17 0.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/GraphMutation/part-2 b/pregelix/pregelix-example/src/test/resources/expected/GraphMutation/part-2
new file mode 100755
index 0000000..e69de29
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/GraphMutation/part-2
diff --git a/pregelix/pregelix-example/src/test/resources/expected/GraphMutation/part-3 b/pregelix/pregelix-example/src/test/resources/expected/GraphMutation/part-3
new file mode 100755
index 0000000..9446ff7
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/GraphMutation/part-3
@@ -0,0 +1,3 @@
+7 0.0
+11 0.0
+19 0.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique.result b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique.result
deleted file mode 100644
index d238037..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique.result
+++ /dev/null
@@ -1,7 +0,0 @@
-1 1,2,3,4;
-2 2,3,4;
-3
-4
-5
-6
-7
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique/part-0 b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique/part-0
new file mode 100755
index 0000000..902fadf
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique/part-0
@@ -0,0 +1 @@
+4
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique/part-1 b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique/part-1
new file mode 100755
index 0000000..ba34424
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique/part-1
@@ -0,0 +1,2 @@
+1 1,2,3,4;
+5
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique/part-2 b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique/part-2
new file mode 100755
index 0000000..834e389
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique/part-2
@@ -0,0 +1,2 @@
+2 2,3,4;
+6
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique/part-3 b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique/part-3
new file mode 100755
index 0000000..b8e2461
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique/part-3
@@ -0,0 +1,2 @@
+3 3,4;
+7
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique2/part-0 b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique2/part-0
new file mode 100755
index 0000000..902fadf
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique2/part-0
@@ -0,0 +1 @@
+4
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique2/part-1 b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique2/part-1
new file mode 100755
index 0000000..b83e1a3
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique2/part-1
@@ -0,0 +1,2 @@
+1 1,2,3;1,3,4;
+5
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique2/part-2 b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique2/part-2
new file mode 100755
index 0000000..45e2a23
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique2/part-2
@@ -0,0 +1,2 @@
+2 2,3;
+6
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique2/part-3 b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique2/part-3
new file mode 100755
index 0000000..447aa38
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique2/part-3
@@ -0,0 +1 @@
+3 3,4;3,5;
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique3/part-0 b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique3/part-0
new file mode 100755
index 0000000..b4dced4
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique3/part-0
@@ -0,0 +1,5 @@
+0 0,19;
+4 4,5,6,7,8,9;
+8
+12 12,13,14,15,16,17,18,19;
+16
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique3/part-1 b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique3/part-1
new file mode 100755
index 0000000..f554dfe
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique3/part-1
@@ -0,0 +1,5 @@
+1 1,2,3,4,5,6,7,8,9;
+5
+9
+13
+17
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique3/part-2 b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique3/part-2
new file mode 100755
index 0000000..4df9b8c
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique3/part-2
@@ -0,0 +1,5 @@
+2 2,3,4,5,6,7,8,9;
+6
+10
+14
+18
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique3/part-3 b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique3/part-3
new file mode 100755
index 0000000..5131560
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique3/part-3
@@ -0,0 +1,5 @@
+3 3,4,5,6,7,8,9;
+7
+11 11,12,13,14,15,16,17,18,19;
+15
+19
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRank.result b/pregelix/pregelix-example/src/test/resources/expected/PageRank.result
deleted file mode 100644
index 9c4d83a..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRank.result
+++ /dev/null
@@ -1,20 +0,0 @@
-0 0.008290140026154316
-1 0.1535152819247165
-2 0.14646839195826475
-3 0.08125113985998214
-4 0.03976979906329426
-5 0.0225041581462058
-6 0.015736276824953852
-7 0.012542224114863661
-8 0.010628239626209894
-9 0.009294348455354817
-10 0.008290140026154316
-11 0.15351528192471647
-12 0.14646839195826472
-13 0.08125113985998214
-14 0.03976979906329425
-15 0.0225041581462058
-16 0.015736276824953852
-17 0.012542224114863661
-18 0.010628239626209894
-19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRank/part-0 b/pregelix/pregelix-example/src/test/resources/expected/PageRank/part-0
new file mode 100755
index 0000000..5b07add
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRank/part-0
@@ -0,0 +1,10 @@
+0 0.008290140026154316
+4 0.039769799063294246
+8 0.010628239626209894
+12 0.14646839195826478
+16 0.015736276824953852
+20 0.008290140026154316
+24 0.039769799063294246
+28 0.010628239626209894
+32 0.14646839195826478
+36 0.015736276824953852
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRank/part-1 b/pregelix/pregelix-example/src/test/resources/expected/PageRank/part-1
new file mode 100755
index 0000000..caf8a4c
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRank/part-1
@@ -0,0 +1,10 @@
+1 0.15351528192471647
+5 0.0225041581462058
+9 0.009294348455354817
+13 0.08125113985998214
+17 0.012542224114863661
+21 0.15351528192471647
+25 0.0225041581462058
+29 0.009294348455354817
+33 0.08125113985998214
+37 0.012542224114863661
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRank/part-2 b/pregelix/pregelix-example/src/test/resources/expected/PageRank/part-2
new file mode 100755
index 0000000..29a9d52
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRank/part-2
@@ -0,0 +1,10 @@
+2 0.14646839195826475
+6 0.015736276824953852
+10 0.008290140026154316
+14 0.03976979906329426
+18 0.010628239626209894
+22 0.14646839195826472
+26 0.01573627682495385
+30 0.008290140026154316
+34 0.03976979906329426
+38 0.010628239626209894
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRank/part-3 b/pregelix/pregelix-example/src/test/resources/expected/PageRank/part-3
new file mode 100755
index 0000000..c9bfeb8
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRank/part-3
@@ -0,0 +1,10 @@
+3 0.08125113985998214
+7 0.012542224114863663
+11 0.1535152819247165
+15 0.0225041581462058
+19 0.009294348455354817
+23 0.08125113985998211
+27 0.012542224114863661
+31 0.1535152819247165
+35 0.0225041581462058
+39 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result
deleted file mode 100644
index 6432eda..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result
+++ /dev/null
@@ -1,20 +0,0 @@
-0 0.008290140026154316
-1 0.1535152819247165
-2 0.14646839195826475
-3 0.08125113985998214
-4 0.03976979906329426
-5 0.0225041581462058
-6 0.015736276824953852
-7 0.012542224114863661
-8 0.010628239626209894
-9 0.009294348455354817
-10 0.008290140026154316
-11 0.15351528192471647
-12 0.14646839195826472
-13 0.08125113985998214
-14 0.03976979906329426
-15 0.0225041581462058
-16 0.015736276824953852
-17 0.012542224114863661
-18 0.010628239626209894
-19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankReal/part-0 b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal/part-0
new file mode 100755
index 0000000..383076e
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal/part-0
@@ -0,0 +1,5 @@
+0 0.008290140026154316
+4 0.03976979906329426
+8 0.010628239626209894
+12 0.14646839195826478
+16 0.015736276824953852
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankReal/part-1 b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal/part-1
new file mode 100755
index 0000000..0b1a38c
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal/part-1
@@ -0,0 +1,5 @@
+1 0.15351528192471653
+5 0.0225041581462058
+9 0.009294348455354817
+13 0.08125113985998214
+17 0.012542224114863661
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankReal/part-2 b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal/part-2
new file mode 100755
index 0000000..ec995b2
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal/part-2
@@ -0,0 +1,5 @@
+2 0.14646839195826478
+6 0.015736276824953852
+10 0.008290140026154316
+14 0.03976979906329426
+18 0.010628239626209894
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankReal/part-3 b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal/part-3
new file mode 100755
index 0000000..edb7484
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal/part-3
@@ -0,0 +1,5 @@
+3 0.08125113985998214
+7 0.012542224114863661
+11 0.15351528192471653
+15 0.0225041581462058
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result
deleted file mode 100644
index 2bd09e1..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result
+++ /dev/null
@@ -1,23 +0,0 @@
-0 0.0072088164890121405
-1 0.12352056961948686
-2 0.12045670441668178
-3 0.06798545786459467
-4 0.03387281259892814
-5 0.01942600635480669
-6 0.013661020012182747
-7 0.0109034351563503
-8 0.009241684574402657
-9 0.008082028259564783
-10 0.007208817414047232
-11 0.07555839219845861
-12 0.07249452699565352
-13 0.05063539695954156
-14 0.029644452692487822
-15 0.018670183493927354
-16 0.013558283213067561
-17 0.010892790899883237
-18 0.009240874593661061
-19 0.008081987856433137
-21 0.006521739130434782
-25 0.006521739130434782
-27 0.006521739130434782
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex/part-0 b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex/part-0
new file mode 100755
index 0000000..1d26aee2
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex/part-0
@@ -0,0 +1,5 @@
+0 0.0072088164890121405
+4 0.03387281259892814
+8 0.009241684574402657
+12 0.07249452699565351
+16 0.013558283213067561
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex/part-1 b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex/part-1
new file mode 100755
index 0000000..d013c3e
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex/part-1
@@ -0,0 +1,7 @@
+1 0.12352056961948689
+5 0.01942600635480669
+9 0.008082028259564783
+13 0.050635396959541557
+17 0.010892790899883237
+21 0.006521739130434782
+25 0.006521739130434782
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex/part-2 b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex/part-2
new file mode 100755
index 0000000..e8aa0e1
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex/part-2
@@ -0,0 +1,5 @@
+2 0.12045670441668178
+6 0.013661020012182747
+10 0.007208817414047232
+14 0.029644452692487822
+18 0.009240874593661061
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex/part-3 b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex/part-3
new file mode 100755
index 0000000..e52970d
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex/part-3
@@ -0,0 +1,6 @@
+3 0.06798545786459467
+7 0.0109034351563503
+11 0.0755583921984586
+15 0.018670183493927354
+19 0.008081987856433137
+27 0.006521739130434782
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic.result
deleted file mode 100644
index 6432eda..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic.result
+++ /dev/null
@@ -1,20 +0,0 @@
-0 0.008290140026154316
-1 0.1535152819247165
-2 0.14646839195826475
-3 0.08125113985998214
-4 0.03976979906329426
-5 0.0225041581462058
-6 0.015736276824953852
-7 0.012542224114863661
-8 0.010628239626209894
-9 0.009294348455354817
-10 0.008290140026154316
-11 0.15351528192471647
-12 0.14646839195826472
-13 0.08125113985998214
-14 0.03976979906329426
-15 0.0225041581462058
-16 0.015736276824953852
-17 0.012542224114863661
-18 0.010628239626209894
-19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic/part-0 b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic/part-0
new file mode 100755
index 0000000..383076e
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic/part-0
@@ -0,0 +1,5 @@
+0 0.008290140026154316
+4 0.03976979906329426
+8 0.010628239626209894
+12 0.14646839195826478
+16 0.015736276824953852
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic/part-1 b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic/part-1
new file mode 100755
index 0000000..0b1a38c
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic/part-1
@@ -0,0 +1,5 @@
+1 0.15351528192471653
+5 0.0225041581462058
+9 0.009294348455354817
+13 0.08125113985998214
+17 0.012542224114863661
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic/part-2 b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic/part-2
new file mode 100755
index 0000000..ec995b2
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic/part-2
@@ -0,0 +1,5 @@
+2 0.14646839195826478
+6 0.015736276824953852
+10 0.008290140026154316
+14 0.03976979906329426
+18 0.010628239626209894
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic/part-3 b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic/part-3
new file mode 100755
index 0000000..5593738
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic/part-3
@@ -0,0 +1,5 @@
+3 0.08125113985998214
+7 0.012542224114863661
+11 0.15351528192471653
+15 0.022504158146205808
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result
deleted file mode 100755
index 9a747a6..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result
+++ /dev/null
@@ -1,20 +0,0 @@
-0 0.008290140026154316
-1 0.15351528192471647
-2 0.14646839195826475
-3 0.08125113985998211
-4 0.03976979906329425
-5 0.0225041581462058
-6 0.01573627682495385
-7 0.012542224114863661
-8 0.010628239626209894
-9 0.009294348455354817
-10 0.008290140026154316
-11 0.1535152819247165
-12 0.14646839195826475
-13 0.08125113985998214
-14 0.03976979906329426
-15 0.0225041581462058
-16 0.015736276824953852
-17 0.012542224114863661
-18 0.010628239626209894
-19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner/part-0 b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner/part-0
new file mode 100755
index 0000000..6fd5f60
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner/part-0
@@ -0,0 +1,5 @@
+0 0.008290140026154316
+4 0.03976979906329425
+8 0.010628239626209894
+12 0.14646839195826475
+16 0.01573627682495385
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner/part-1 b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner/part-1
new file mode 100755
index 0000000..0b1a38c
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner/part-1
@@ -0,0 +1,5 @@
+1 0.15351528192471653
+5 0.0225041581462058
+9 0.009294348455354817
+13 0.08125113985998214
+17 0.012542224114863661
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner/part-2 b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner/part-2
new file mode 100755
index 0000000..69a2803
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner/part-2
@@ -0,0 +1,5 @@
+2 0.14646839195826478
+6 0.015736276824953852
+10 0.008290140026154316
+14 0.03976979906329425
+18 0.010628239626209894
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner/part-3 b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner/part-3
new file mode 100755
index 0000000..7dcb359
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner/part-3
@@ -0,0 +1,5 @@
+3 0.08125113985998214
+7 0.012542224114863661
+11 0.1535152819247165
+15 0.0225041581462058
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result
deleted file mode 100644
index a1dfc0f..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result
+++ /dev/null
@@ -1,23 +0,0 @@
-0 2
-1 3
-2 1
-3 1
-4 1
-5 1
-6 1
-7 1
-8 1
-9 1
-10 3
-11 2
-12 2
-13 2
-14 2
-15 2
-16 2
-17 2
-18 2
-19 2
-21 0
-25 0
-27 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex/part-0 b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex/part-0
new file mode 100755
index 0000000..6e3ba89
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex/part-0
@@ -0,0 +1,5 @@
+0 0
+4 1
+8 1
+12 2
+16 2
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex/part-1 b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex/part-1
new file mode 100755
index 0000000..2fcc8eb
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex/part-1
@@ -0,0 +1,7 @@
+1 1
+5 1
+9 1
+13 2
+17 2
+21 0
+25 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex/part-2 b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex/part-2
new file mode 100755
index 0000000..c93732c
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex/part-2
@@ -0,0 +1,5 @@
+2 1
+6 1
+10 3
+14 2
+18 2
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex/part-3 b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex/part-3
new file mode 100755
index 0000000..c745349
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex/part-3
@@ -0,0 +1,6 @@
+3 1
+7 1
+11 2
+15 2
+19 2
+27 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result
deleted file mode 100644
index 1693fb2..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result
+++ /dev/null
@@ -1,23 +0,0 @@
-0 1
-1 1
-2 1
-3 1
-4 1
-5 1
-6 1
-7 1
-8 1
-9 1
-10 1
-11 1
-12 1
-13 1
-14 1
-15 1
-16 1
-17 1
-18 1
-19 1
-21 0
-25 2
-27 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity/part-0 b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity/part-0
new file mode 100755
index 0000000..8e02e13
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity/part-0
@@ -0,0 +1,5 @@
+0 1
+4 1
+8 1
+12 1
+16 1
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity/part-1 b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity/part-1
new file mode 100755
index 0000000..cfbb359
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity/part-1
@@ -0,0 +1,7 @@
+1 1
+5 1
+9 1
+13 1
+17 1
+21 0
+25 2
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity/part-2 b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity/part-2
new file mode 100755
index 0000000..8e3ca7c
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity/part-2
@@ -0,0 +1,5 @@
+2 1
+6 1
+10 1
+14 1
+18 1
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity/part-3 b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity/part-3
new file mode 100755
index 0000000..5f83a3d
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity/part-3
@@ -0,0 +1,6 @@
+3 1
+7 1
+11 1
+15 1
+19 1
+27 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result
deleted file mode 100644
index 46d1c73..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result
+++ /dev/null
@@ -1,20 +0,0 @@
-0 0.0
-1 0.0
-2 100.0
-3 300.0
-4 600.0
-5 1000.0
-6 1500.0
-7 2100.0
-8 2800.0
-9 3600.0
-10 4500.0
-11 5500.0
-12 6600.0
-13 7800.0
-14 9100.0
-15 10500.0
-16 12000.0
-17 13600.0
-18 15300.0
-19 17100.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths/part-0 b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths/part-0
new file mode 100755
index 0000000..e8eadcd
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths/part-0
@@ -0,0 +1,10 @@
+0 0.0
+4 600.0
+8 2800.0
+12 6600.0
+16 12000.0
+20 19000.0
+24 27600.0
+28 37800.0
+32 49600.0
+36 63000.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths/part-1 b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths/part-1
new file mode 100755
index 0000000..c3d200b
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths/part-1
@@ -0,0 +1,10 @@
+1 0.0
+5 1000.0
+9 3600.0
+13 7800.0
+17 13600.0
+21 21000.0
+25 30000.0
+29 40600.0
+33 52800.0
+37 66600.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths/part-2 b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths/part-2
new file mode 100755
index 0000000..779a683
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths/part-2
@@ -0,0 +1,10 @@
+2 100.0
+6 1500.0
+10 4500.0
+14 9100.0
+18 15300.0
+22 23100.0
+26 32500.0
+30 43500.0
+34 56100.0
+38 70300.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths/part-3 b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths/part-3
new file mode 100755
index 0000000..57d26c9
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths/part-3
@@ -0,0 +1,10 @@
+3 300.0
+7 2100.0
+11 5500.0
+15 10500.0
+19 17100.0
+23 25300.0
+27 35100.0
+31 46500.0
+35 59500.0
+39 74100.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result
deleted file mode 100644
index b42462f..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result
+++ /dev/null
@@ -1,20 +0,0 @@
-0 0.0
-1 1.0
-2 2.0
-3 3.0
-4 4.0
-5 5.0
-6 6.0
-7 7.0
-8 8.0
-9 9.0
-10 10.0
-11 11.0
-12 12.0
-13 13.0
-14 14.0
-15 15.0
-16 16.0
-17 17.0
-18 18.0
-19 19.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal/part-0 b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal/part-0
new file mode 100755
index 0000000..a1cabe2
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal/part-0
@@ -0,0 +1,5 @@
+0 0.0
+4 4.0
+8 8.0
+12 12.0
+16 16.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal/part-1 b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal/part-1
new file mode 100755
index 0000000..303ed2a
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal/part-1
@@ -0,0 +1,5 @@
+1 1.0
+5 5.0
+9 9.0
+13 13.0
+17 17.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal/part-2 b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal/part-2
new file mode 100755
index 0000000..0020d25
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal/part-2
@@ -0,0 +1,5 @@
+2 2.0
+6 6.0
+10 10.0
+14 14.0
+18 18.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal/part-3 b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal/part-3
new file mode 100755
index 0000000..e2657bd
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal/part-3
@@ -0,0 +1,5 @@
+3 3.0
+7 7.0
+11 11.0
+15 15.0
+19 19.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting.result b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting.result
deleted file mode 100644
index 4818e13..0000000
--- a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting.result
+++ /dev/null
@@ -1,7 +0,0 @@
-1 3
-2 2
-3 0
-4 0
-5 1
-6 0
-7 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/.part-0.crc b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/.part-0.crc
new file mode 100644
index 0000000..61e7df3
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/.part-0.crc
Binary files differ
diff --git a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/.part-1.crc b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/.part-1.crc
new file mode 100644
index 0000000..9f34827
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/.part-1.crc
Binary files differ
diff --git a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/.part-2.crc b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/.part-2.crc
new file mode 100644
index 0000000..cfe7fa2
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/.part-2.crc
Binary files differ
diff --git a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/.part-3.crc b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/.part-3.crc
new file mode 100644
index 0000000..9959654
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/.part-3.crc
Binary files differ
diff --git a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/part-0 b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/part-0
new file mode 100755
index 0000000..b36d2b5
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/part-0
@@ -0,0 +1 @@
+4 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/part-1 b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/part-1
new file mode 100755
index 0000000..2812c76
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/part-1
@@ -0,0 +1,2 @@
+1 3
+5 1
diff --git a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/part-2 b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/part-2
new file mode 100755
index 0000000..25cfb7b
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/part-2
@@ -0,0 +1,2 @@
+2 2
+6 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/part-3 b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/part-3
new file mode 100755
index 0000000..2bd8d38
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting/part-3
@@ -0,0 +1,2 @@
+3 0
+7 0
diff --git a/pregelix/pregelix-example/src/test/resources/hyracks-deployment.properties b/pregelix/pregelix-example/src/test/resources/hyracks-deployment.properties
deleted file mode 100644
index 9c42b89..0000000
--- a/pregelix/pregelix-example/src/test/resources/hyracks-deployment.properties
+++ /dev/null
@@ -1,2 +0,0 @@
-#cc.bootstrap.class=edu.uci.ics.asterix.hyracks.bootstrap.CCBootstrapImpl
-nc.bootstrap.class=edu.uci.ics.pregelix.runtime.bootstrap.NCBootstrapImpl
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsReal.xml b/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsReal.xml
index 22ae6cf..decbde8 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsReal.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsReal.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>ConnectedComponents</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsRealComplex.xml b/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsRealComplex.xml
index 50662f9..c7fec9f 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsRealComplex.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ConnectedComponentsRealComplex.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>ConnectedComponents</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/GraphMutation.xml b/pregelix/pregelix-example/src/test/resources/jobs/GraphMutation.xml
index 9f51f6d..d5ec8f1 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/GraphMutation.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/GraphMutation.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>Graph Mutation</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml b/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml
index 616c647..c0559d9 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>Maximal Clique</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique2.xml b/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique2.xml
new file mode 100644
index 0000000..541806d
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique2.xml
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/clique2</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/resultclique</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>Maximal Clique 2</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex$MaximalCliqueVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.maximalclique.TextMaximalCliqueInputFormat</value></property>
+<property><name>pregelix.aggregatorClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueAggregator</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>pregelix.incStateLength</name><value>true</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
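MaximalClique2.xml is a complete serialized Hadoop Configuration: everything except the pregelix.* entries (vertex class, input/output formats, aggregator, incStateLength) and the mapred.input.dir/mapred.output.dir paths is stock Hadoop defaults. A small sketch of round-tripping such a fixture, assuming the path resolves relative to the pregelix-example module root:

    import java.io.FileOutputStream;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;

    public class JobXmlRoundTripSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration(false); // skip Hadoop defaults
            conf.addResource(new Path("src/test/resources/jobs/MaximalClique2.xml"));
            // The pregelix.* entries carry the actual job wiring:
            System.out.println(conf.get("pregelix.vertexClass"));
            try (FileOutputStream out = new FileOutputStream("MaximalClique2-copy.xml")) {
                conf.writeXml(out); // serializes back to the same XML property layout
            }
        }
    }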
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique3.xml b/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique3.xml
new file mode 100644
index 0000000..7214b3f
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique3.xml
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/clique3</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/resultclique</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>Maximal Clique 3</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex$MaximalCliqueVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.maximalclique.TextMaximalCliqueInputFormat</value></property>
+<property><name>pregelix.aggregatorClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueAggregator</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>pregelix.incStateLength</name><value>true</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml
index 744e5b0..65e0b30 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>PageRank</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRankReal.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRankReal.xml
index b51bd98..9e1e0b0 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/PageRankReal.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRankReal.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>PageRank</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealComplex.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealComplex.xml
index a9e43bd..ceea85b 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealComplex.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealComplex.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>PageRank</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml
index c1a04ae..c05a4da 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>PageRank</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealNoCombiner.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealNoCombiner.xml
index 410ea8b..ac0d508 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealNoCombiner.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealNoCombiner.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>PageRank</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplex.xml b/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplex.xml
index 0332ec5..225429a 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplex.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplex.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>Reachibility</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplexNoConnectivity.xml b/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplexNoConnectivity.xml
index 4f280fc..bd9da92 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplexNoConnectivity.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ReachibilityRealComplexNoConnectivity.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>Reachibility</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml b/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml
index 9e791e2..9acd7bc 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>ShortestPaths</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ShortestPathsReal.xml b/pregelix/pregelix-example/src/test/resources/jobs/ShortestPathsReal.xml
index 90caf6b..6c25575 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/ShortestPathsReal.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ShortestPathsReal.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>ShortestPaths</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml b/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml
index 0f44f4d..4a40a6a 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml
@@ -37,6 +37,7 @@
<property><name>fs.checkpoint.size</name><value>67108864</value></property>
<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
<property><name>mapred.job.name</name><value>Triangle Counting</value></property>
+<property><name>pregelix.nmkComputerClass</name><value>edu.uci.ics.pregelix.example.data.VLongNormalizedKeyComputer</value></property>
<property><name>local.cache.size</name><value>10737418240</value></property>
<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
diff --git a/pregelix/pregelix-runtime/pom.xml b/pregelix/pregelix-runtime/pom.xml
index 2d7b20d..2b56271 100644
--- a/pregelix/pregelix-runtime/pom.xml
+++ b/pregelix/pregelix-runtime/pom.xml
@@ -7,7 +7,7 @@
<parent>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</parent>
@@ -74,82 +74,82 @@
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>pregelix-dataflow</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-api</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-data-std</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-common</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-cc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-control-nc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-ipc</artifactId>
- <version>0.2.5-SNAPSHOT</version>
+ <version>0.2.6-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
</dependency>
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/ComputeUpdateFunctionFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/ComputeUpdateFunctionFactory.java
index 0c09757..caeeb10 100644
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/ComputeUpdateFunctionFactory.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/ComputeUpdateFunctionFactory.java
@@ -109,7 +109,7 @@
@Override
public void open(IHyracksTaskContext ctx, RecordDescriptor rd, IFrameWriter... writers)
throws HyracksDataException {
- this.conf = confFactory.createConfiguration();
+ this.conf = confFactory.createConfiguration(ctx);
this.dynamicStateLength = BspUtils.getDynamicVertexValueSize(conf);
this.aggregator = BspUtils.createGlobalAggregator(conf);
this.aggregator.init();
@@ -249,13 +249,10 @@
if (vertex != null && vertex.hasUpdate()) {
if (!dynamicStateLength) {
// in-place update
- int fieldCount = tupleRef.getFieldCount();
- for (int i = 1; i < fieldCount; i++) {
- byte[] data = tupleRef.getFieldData(i);
- int offset = tupleRef.getFieldStart(i);
- bbos.setByteArray(data, offset);
- vertex.write(output);
- }
+ byte[] data = tupleRef.getFieldData(1);
+ int offset = tupleRef.getFieldStart(1);
+ bbos.setByteArray(data, offset);
+ vertex.write(output);
} else {
// write the vertex id
DataOutput tbOutput = cloneUpdateTb.getDataOutput();
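The in-place update branch above is a behavior fix as much as a cleanup: the old code looped over every field past the vertex id and re-serialized the vertex once per field, while the tuple layout only holds the serialized vertex state in field 1, so the loop and the single write coincide only for two-field tuples. A toy illustration of the contract, with a plain byte[][] standing in for the Hyracks tuple reference:

    public class InPlaceUpdateSketch {
        public static void main(String[] args) {
            byte[][] tuple = new byte[2][];
            tuple[0] = new byte[] { 42 };          // field 0: vertex id
            tuple[1] = new byte[] { 0, 0, 0, 0 };  // field 1: serialized vertex state
            byte[] updatedState = new byte[] { 1, 2, 3, 4 };
            // Fixed behavior: overwrite field 1 exactly once. The old loop would
            // have repeated this write for every field >= 1 on wider tuples.
            System.arraycopy(updatedState, 0, tuple[1], 0, updatedState.length);
        }
    }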
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/StartComputeUpdateFunctionFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/StartComputeUpdateFunctionFactory.java
index 1bf6a2b..48d4d80 100644
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/StartComputeUpdateFunctionFactory.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/function/StartComputeUpdateFunctionFactory.java
@@ -112,7 +112,7 @@
@Override
public void open(IHyracksTaskContext ctx, RecordDescriptor rd, IFrameWriter... writers)
throws HyracksDataException {
- this.conf = confFactory.createConfiguration();
+ this.conf = confFactory.createConfiguration(ctx);
this.dynamicStateLength = BspUtils.getDynamicVertexValueSize(conf);
this.aggregator = BspUtils.createGlobalAggregator(conf);
this.aggregator.init();
@@ -253,13 +253,10 @@
if (vertex != null && vertex.hasUpdate()) {
if (!dynamicStateLength) {
// in-place update
- int fieldCount = tupleRef.getFieldCount();
- for (int i = 1; i < fieldCount; i++) {
- byte[] data = tupleRef.getFieldData(i);
- int offset = tupleRef.getFieldStart(i);
- bbos.setByteArray(data, offset);
- vertex.write(output);
- }
+ byte[] data = tupleRef.getFieldData(1);
+ int offset = tupleRef.getFieldStart(1);
+ bbos.setByteArray(data, offset);
+ vertex.write(output);
} else {
// write the vertex id
DataOutput tbOutput = cloneUpdateTb.getDataOutput();
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AccumulatingAggregatorFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AccumulatingAggregatorFactory.java
index 8f63b6e..851a83a 100644
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AccumulatingAggregatorFactory.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AccumulatingAggregatorFactory.java
@@ -40,7 +40,7 @@
@SuppressWarnings("unchecked")
@Override
- public IAggregatorDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDesc,
+ public IAggregatorDescriptor createAggregator(final IHyracksTaskContext ctx, RecordDescriptor inRecordDesc,
RecordDescriptor outRecordDescriptor, int[] aggKeys, int[] partialKeys) throws HyracksDataException {
return new IAggregatorDescriptor() {
@@ -113,7 +113,7 @@
for (int i = 0; i < agg.length; i++) {
aggOutput[i] = new ArrayBackedValueStorage();
try {
- agg[i] = aggFactories[i].createAggregateFunction(aggOutput[i]);
+ agg[i] = aggFactories[i].createAggregateFunction(ctx, aggOutput[i]);
} catch (Exception e) {
throw new IllegalStateException(e);
}
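Marking ctx final is what allows the anonymous IAggregatorDescriptor to capture it for the createAggregateFunction(ctx, ...) call inside; before Java 8, an anonymous inner class may only reference enclosing parameters and locals declared final. A self-contained reminder of that rule, with hypothetical names:

    public class FinalCaptureSketch {
        interface Task {
            int run();
        }

        static Task make(final int seed) { // 'final' required for capture pre-Java 8
            return new Task() {
                @Override
                public int run() {
                    return seed + 1; // reads the captured parameter
                }
            };
        }

        public static void main(String[] args) {
            System.out.println(make(41).run()); // prints 42
        }
    }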
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunction.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunction.java
index 1813dcc..3cf46a2 100644
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunction.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunction.java
@@ -26,6 +26,7 @@
import org.apache.hadoop.io.WritableComparable;
import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
@@ -53,9 +54,9 @@
private MsgList msgList = new MsgList();
private boolean keyRead = false;
- public AggregationFunction(IConfigurationFactory confFactory, DataOutput output, boolean isFinalStage,
- boolean partialAggAsInput) throws HyracksDataException {
- this.conf = confFactory.createConfiguration();
+ public AggregationFunction(IHyracksTaskContext ctx, IConfigurationFactory confFactory, DataOutput output,
+ boolean isFinalStage, boolean partialAggAsInput) throws HyracksDataException {
+ this.conf = confFactory.createConfiguration(ctx);
this.output = output;
this.isFinalStage = isFinalStage;
this.partialAggAsInput = partialAggAsInput;
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunctionFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunctionFactory.java
index a09f688..7ce9e1d 100644
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunctionFactory.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/simpleagg/AggregationFunctionFactory.java
@@ -17,6 +17,7 @@
import java.io.DataOutput;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksException;
import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
import edu.uci.ics.pregelix.dataflow.base.IConfigurationFactory;
@@ -36,8 +37,9 @@
}
@Override
- public IAggregateFunction createAggregateFunction(IDataOutputProvider provider) throws HyracksException {
+ public IAggregateFunction createAggregateFunction(IHyracksTaskContext ctx, IDataOutputProvider provider)
+ throws HyracksException {
DataOutput output = provider.getDataOutput();
- return new AggregationFunction(confFactory, output, isFinalStage, partialAggAsInput);
+ return new AggregationFunction(ctx, confFactory, output, isFinalStage, partialAggAsInput);
}
}
diff --git a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DatatypeHelper.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/DatatypeHelper.java
similarity index 98%
rename from pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DatatypeHelper.java
rename to pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/DatatypeHelper.java
index ee319c6..f9085c4 100644
--- a/pregelix/pregelix-core/src/main/java/edu/uci/ics/pregelix/core/util/DatatypeHelper.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/DatatypeHelper.java
@@ -12,7 +12,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package edu.uci.ics.pregelix.core.util;
+package edu.uci.ics.pregelix.runtime.touchpoint;
import java.io.DataInput;
import java.io.DataOutput;
@@ -80,6 +80,7 @@
throw new HyracksDataException(e);
}
}
+
}
@SuppressWarnings({ "rawtypes", "unchecked" })
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/PreSuperStepRuntimeHookFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/PreSuperStepRuntimeHookFactory.java
index 5f0ed9e..850ae1e 100644
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/PreSuperStepRuntimeHookFactory.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/PreSuperStepRuntimeHookFactory.java
@@ -39,7 +39,7 @@
@Override
public void configure(IHyracksTaskContext ctx) throws HyracksDataException {
- Configuration conf = confFactory.createConfiguration();
+ Configuration conf = confFactory.createConfiguration(ctx);
IterationUtils.setProperties(giraphJobId, ctx, conf);
}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/RuntimeHookFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/RuntimeHookFactory.java
index c025f85..05b1542 100644
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/RuntimeHookFactory.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/RuntimeHookFactory.java
@@ -44,9 +44,10 @@
@Override
public void configure(IHyracksTaskContext ctx) throws HyracksDataException {
- Configuration conf = confFactory.createConfiguration();
+ Configuration conf = confFactory.createConfiguration(ctx);
try {
TaskAttemptContext mapperContext = ctxFactory.createContext(conf, new TaskAttemptID());
+ mapperContext.getConfiguration().setClassLoader(ctx.getJobletContext().getClassLoader());
Vertex.setContext(mapperContext);
BspUtils.setDefaultConfiguration(conf);
} catch (Exception e) {
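The added line pushes the joblet's classloader into the Configuration that backs the mapper context, so class-name properties from the job XML (pregelix.vertexClass and friends) resolve against classes shipped with the job rather than only the NC's system classpath. A sketch of the same pattern on a bare Configuration, with the thread context classloader standing in for ctx.getJobletContext().getClassLoader():

    import org.apache.hadoop.conf.Configuration;

    public class ClassLoaderWiringSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            ClassLoader jobLoader = Thread.currentThread().getContextClassLoader();
            conf.setClassLoader(jobLoader);
            // Class lookups through the conf now consult jobLoader first.
            Class<?> clazz = conf.getClassByName("java.lang.String");
            System.out.println(clazz.getName());
        }
    }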
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VLongAscNormalizedKeyComputerFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VLongAscNormalizedKeyComputerFactory.java
deleted file mode 100644
index 9181691..0000000
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VLongAscNormalizedKeyComputerFactory.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package edu.uci.ics.pregelix.runtime.touchpoint;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-import edu.uci.ics.pregelix.api.util.SerDeUtils;
-
-public class VLongAscNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
- private static final long serialVersionUID = 1L;
-
- @Override
- public INormalizedKeyComputer createNormalizedKeyComputer() {
- return new INormalizedKeyComputer() {
- private static final int POSTIVE_LONG_MASK = (3 << 30);
- private static final int NON_NEGATIVE_INT_MASK = (2 << 30);
- private static final int NEGATIVE_LONG_MASK = (0 << 30);
-
- @Override
- public int normalize(byte[] bytes, int start, int length) {
- long value = SerDeUtils.readVLong(bytes, start, length);
- int highValue = (int) (value >> 32);
- if (highValue > 0) {
- /**
- * larger than Integer.MAX
- */
- int highNmk = getKey(highValue);
- highNmk >>= 2;
- highNmk |= POSTIVE_LONG_MASK;
- return highNmk;
- } else if (highValue == 0) {
- /**
- * smaller than Integer.MAX but >=0
- */
- int lowNmk = (int) value;
- lowNmk >>= 2;
- lowNmk |= NON_NEGATIVE_INT_MASK;
- return lowNmk;
- } else {
- /**
- * less than 0; TODO: have not optimized for that
- */
- int highNmk = getKey(highValue);
- highNmk >>= 2;
- highNmk |= NEGATIVE_LONG_MASK;
- return highNmk;
- }
- }
-
- private int getKey(int value) {
- long unsignedFirstValue = (long) value;
- int nmk = (int) ((unsignedFirstValue - ((long) Integer.MIN_VALUE)) & 0xffffffffL);
- return nmk;
- }
-
- };
- }
-}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VLongDescNormalizedKeyComputerFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VLongDescNormalizedKeyComputerFactory.java
deleted file mode 100644
index 6b2738b..0000000
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VLongDescNormalizedKeyComputerFactory.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package edu.uci.ics.pregelix.runtime.touchpoint;
-
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
-import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
-
-public class VLongDescNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
- private static final long serialVersionUID = 1L;
- private final INormalizedKeyComputerFactory ascNormalizedKeyComputerFactory = new VLongAscNormalizedKeyComputerFactory();
-
- @Override
- public INormalizedKeyComputer createNormalizedKeyComputer() {
- return new INormalizedKeyComputer() {
- private INormalizedKeyComputer nmkComputer = ascNormalizedKeyComputerFactory.createNormalizedKeyComputer();
-
- @Override
- public int normalize(byte[] bytes, int start, int length) {
- int nk = nmkComputer.normalize(bytes, start, length);
- return (int) ((long) Integer.MAX_VALUE - (long) (nk - Integer.MIN_VALUE));
- }
-
- };
- }
-}
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdNormalizedKeyComputerFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdNormalizedKeyComputerFactory.java
new file mode 100644
index 0000000..04e16ac
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdNormalizedKeyComputerFactory.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.pregelix.runtime.touchpoint;
+
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputer;
+import edu.uci.ics.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
+import edu.uci.ics.pregelix.api.graph.NormalizedKeyComputer;
+
+/**
+ * This class wraps the user-defined normalized key computer to calculate the normalized
+ * keys of vertex ids.
+ *
+ * @author yingyib
+ */
+public class VertexIdNormalizedKeyComputerFactory implements INormalizedKeyComputerFactory {
+ private static final long serialVersionUID = 1L;
+ private Class<? extends NormalizedKeyComputer> nmkComputerClass;
+
+ public VertexIdNormalizedKeyComputerFactory(Class<? extends NormalizedKeyComputer> nmkComputerClass) {
+ this.nmkComputerClass = nmkComputerClass;
+ }
+
+ @Override
+ public INormalizedKeyComputer createNormalizedKeyComputer() {
+ try {
+ final NormalizedKeyComputer nmkComputer = nmkComputerClass.newInstance();
+ return new INormalizedKeyComputer() {
+
+ @Override
+ public int normalize(byte[] bytes, int start, int length) {
+ return nmkComputer.getNormalizedKey(bytes, start, length);
+ }
+
+ };
+ } catch (Exception e) {
+ throw new IllegalStateException(e);
+ }
+ }
+}
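The two hard-coded VLong factories deleted above are replaced by this single reflective wrapper plus the pregelix.nmkComputerClass job property: the user names a NormalizedKeyComputer, and the factory adapts it to Hyracks' INormalizedKeyComputer. A sketch of what such a user class might look like, reusing the ascending-VLong logic from the deleted VLongAscNormalizedKeyComputerFactory; the getNormalizedKey signature comes from the code above, and NormalizedKeyComputer is assumed here to be an interface exposing that single method.

    import edu.uci.ics.pregelix.api.graph.NormalizedKeyComputer;
    import edu.uci.ics.pregelix.api.util.SerDeUtils;

    public class VLongAscNmkComputerSketch implements NormalizedKeyComputer {
        private static final int POSITIVE_LONG_MASK = (3 << 30);
        private static final int NON_NEGATIVE_INT_MASK = (2 << 30);
        private static final int NEGATIVE_LONG_MASK = (0 << 30);

        @Override
        public int getNormalizedKey(byte[] bytes, int start, int length) {
            long value = SerDeUtils.readVLong(bytes, start, length);
            int highValue = (int) (value >> 32);
            if (highValue > 0) {
                // larger than Integer.MAX_VALUE
                return (getKey(highValue) >> 2) | POSITIVE_LONG_MASK;
            } else if (highValue == 0) {
                // fits in the low 32 bits
                return (((int) value) >> 2) | NON_NEGATIVE_INT_MASK;
            } else {
                // negative; not specially optimized (as in the deleted code)
                return (getKey(highValue) >> 2) | NEGATIVE_LONG_MASK;
            }
        }

        // Shift the signed int range onto an unsigned ordering.
        private int getKey(int value) {
            return (int) (((long) value - (long) Integer.MIN_VALUE) & 0xffffffffL);
        }
    }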
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdPartitionComputerFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdPartitionComputerFactory.java
index 5eff497..5b4b1f0 100644
--- a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdPartitionComputerFactory.java
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/VertexIdPartitionComputerFactory.java
@@ -24,20 +24,22 @@
import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+import edu.uci.ics.pregelix.dataflow.std.base.ISerializerDeserializerFactory;
public class VertexIdPartitionComputerFactory<K extends Writable, V extends Writable> implements
ITuplePartitionComputerFactory {
private static final long serialVersionUID = 1L;
- private final ISerializerDeserializer<K> keyIO;
+ private final ISerializerDeserializerFactory<K> keyIOFactory;
- public VertexIdPartitionComputerFactory(ISerializerDeserializer<K> keyIO) {
- this.keyIO = keyIO;
+ public VertexIdPartitionComputerFactory(ISerializerDeserializerFactory<K> keyIOFactory) {
+ this.keyIOFactory = keyIOFactory;
}
public ITuplePartitionComputer createPartitioner() {
return new ITuplePartitionComputer() {
private final ByteBufferInputStream bbis = new ByteBufferInputStream();
private final DataInputStream dis = new DataInputStream(bbis);
+ private final ISerializerDeserializer<K> keyIO = keyIOFactory.getSerializerDeserializer();
public int partition(IFrameTupleAccessor accessor, int tIndex, int nParts) throws HyracksDataException {
int keyStart = accessor.getTupleStartOffset(tIndex) + accessor.getFieldSlotsLength()
diff --git a/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/WritableSerializerDeserializerFactory.java b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/WritableSerializerDeserializerFactory.java
new file mode 100644
index 0000000..96f781c
--- /dev/null
+++ b/pregelix/pregelix-runtime/src/main/java/edu/uci/ics/pregelix/runtime/touchpoint/WritableSerializerDeserializerFactory.java
@@ -0,0 +1,21 @@
+package edu.uci.ics.pregelix.runtime.touchpoint;
+
+import org.apache.hadoop.io.Writable;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.pregelix.dataflow.std.base.ISerializerDeserializerFactory;
+
+public class WritableSerializerDeserializerFactory<T extends Writable> implements ISerializerDeserializerFactory<T> {
+ private static final long serialVersionUID = 1L;
+ private final Class<T> clazz;
+
+ public WritableSerializerDeserializerFactory(Class<T> clazz) {
+ this.clazz = clazz;
+ }
+
+ @SuppressWarnings({ "rawtypes", "unchecked" })
+ @Override
+ public ISerializerDeserializer getSerializerDeserializer() {
+ return DatatypeHelper.createSerializerDeserializer(clazz);
+ }
+}
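The point of the new factory is serializability: operator descriptors can ship the Writable class (wrapped in a presumably Serializable factory) across the cluster instead of a live ISerializerDeserializer, and the rewritten VertexIdPartitionComputerFactory above materializes the serde inside createPartitioner() on the receiving task. A wiring sketch under those constructor signatures, with VLongWritable/DoubleWritable as illustrative key/value types:

    import org.apache.hadoop.io.DoubleWritable;
    import org.apache.hadoop.io.VLongWritable;

    import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
    import edu.uci.ics.pregelix.runtime.touchpoint.VertexIdPartitionComputerFactory;
    import edu.uci.ics.pregelix.runtime.touchpoint.WritableSerializerDeserializerFactory;

    public class PartitionerWiringSketch {
        public static void main(String[] args) {
            ITuplePartitionComputerFactory partitionerFactory =
                    new VertexIdPartitionComputerFactory<VLongWritable, DoubleWritable>(
                            new WritableSerializerDeserializerFactory<VLongWritable>(VLongWritable.class));
            // Only here, on the task side, is the ISerializerDeserializer built.
            partitionerFactory.createPartitioner();
        }
    }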