[ASTERIXDB-2256] Reformat sources using code format template
Change-Id: I4faa141c1a8c9700d5e9ac50b839acc9d1eede73
Reviewed-on: https://asterix-gerrit.ics.uci.edu/2310
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Contrib: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Murtadha Hubail <mhubail@apache.org>
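
Judging from the wrapped lines in the hunks below, the format template keeps lines within a fixed column limit and prefers breaking long initializers after the assignment operator rather than inside the argument list. A minimal before/after sketch in plain Java (illustration only; the class and field names are hypothetical, not from this patch):

    import java.util.HashMap;
    import java.util.Map;

    class FormatTemplateExample {
        // Before the reformat, long initializers were typically wrapped
        // inside the argument list:
        //     Map<String, String> cache = new HashMap<String, String>(
        //             initialCapacity);
        // After, the break moves to the assignment operator and the
        // continuation is indented, as in the hunks below:
        private static final Map<String, String> CACHE =
                new HashMap<String, String>();
    }

The same convention shows up for method signatures: parameter lists that exceed the limit are wrapped at a parameter boundary, and a `throws` clause pushed to its own line gets an extra level of continuation indent.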
diff --git a/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/exceptions/AlgebricksException.java b/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/exceptions/AlgebricksException.java
index 64e328a..d1feb08 100644
--- a/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/exceptions/AlgebricksException.java
+++ b/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/exceptions/AlgebricksException.java
@@ -108,8 +108,8 @@
@Override
public String getMessage() {
if (msgCache == null) {
- msgCache = new CachedMessage(
- ErrorMessageUtil.formatMessage(component, errorCode, super.getMessage(), params));
+ msgCache =
+ new CachedMessage(ErrorMessageUtil.formatMessage(component, errorCode, super.getMessage(), params));
}
return msgCache.message;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-compiler/src/main/java/org/apache/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialFirstRuleCheckFixpointRuleController.java b/hyracks-fullstack/algebricks/algebricks-compiler/src/main/java/org/apache/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialFirstRuleCheckFixpointRuleController.java
index 7328278..29c178a 100644
--- a/hyracks-fullstack/algebricks/algebricks-compiler/src/main/java/org/apache/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialFirstRuleCheckFixpointRuleController.java
+++ b/hyracks-fullstack/algebricks/algebricks-compiler/src/main/java/org/apache/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialFirstRuleCheckFixpointRuleController.java
@@ -59,8 +59,7 @@
if (ruleCollection instanceof List) {
rules = (List<IAlgebraicRewriteRule>) ruleCollection;
} else {
- throw AlgebricksException.create(ErrorCode.RULECOLLECTION_NOT_INSTANCE_OF_LIST,
- this.getClass().getName());
+ throw AlgebricksException.create(ErrorCode.RULECOLLECTION_NOT_INSTANCE_OF_LIST, this.getClass().getName());
}
if (rules.isEmpty()) {
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IHyracksJobBuilder.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IHyracksJobBuilder.java
index 1012fef..663661c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IHyracksJobBuilder.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IHyracksJobBuilder.java
@@ -46,7 +46,8 @@
/**
* inputs are numbered starting from 0
*/
- public void contributeGraphEdge(ILogicalOperator src, int srcOutputIndex, ILogicalOperator dest, int destInputIndex);
+ public void contributeGraphEdge(ILogicalOperator src, int srcOutputIndex, ILogicalOperator dest,
+ int destInputIndex);
public void contributeConnector(ILogicalOperator exchgOp, IConnectorDescriptor conn);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/ILogicalOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/ILogicalOperator.java
index 707a7db..dd7e065 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/ILogicalOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/ILogicalOperator.java
@@ -65,7 +65,7 @@
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context,
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException;
+ throws AlgebricksException;
// variables
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IPhysicalOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IPhysicalOperator.java
index 8c0ab2f..2a92aba 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IPhysicalOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IPhysicalOperator.java
@@ -53,7 +53,7 @@
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException;
+ throws AlgebricksException;
public void disableJobGenBelowMe();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/LogicalExpressionTag.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/LogicalExpressionTag.java
index 58f4c60..45b7edc 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/LogicalExpressionTag.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/LogicalExpressionTag.java
@@ -19,5 +19,7 @@
package org.apache.hyracks.algebricks.core.algebra.base;
public enum LogicalExpressionTag {
- FUNCTION_CALL, VARIABLE, CONSTANT
+ FUNCTION_CALL,
+ VARIABLE,
+ CONSTANT
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/ExpressionRuntimeProvider.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/ExpressionRuntimeProvider.java
index 31726d2..71f7b52 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/ExpressionRuntimeProvider.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/ExpressionRuntimeProvider.java
@@ -44,7 +44,7 @@
@Override
public IAggregateEvaluatorFactory createAggregateFunctionFactory(AggregateFunctionCallExpression expr,
IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
return lejg.createAggregateFunctionFactory(expr, env, inputSchemas, context);
}
@@ -58,14 +58,14 @@
@Override
public IRunningAggregateEvaluatorFactory createRunningAggregateFunctionFactory(StatefulFunctionCallExpression expr,
IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
return lejg.createRunningAggregateFunctionFactory(expr, env, inputSchemas, context);
}
@Override
public IUnnestingEvaluatorFactory createUnnestingFunctionFactory(UnnestingFunctionCallExpression expr,
IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
return lejg.createUnnestingFunctionFactory(expr, env, inputSchemas, context);
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/IMergeAggregationExpressionFactory.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/IMergeAggregationExpressionFactory.java
index 2816477..d022bff 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/IMergeAggregationExpressionFactory.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/IMergeAggregationExpressionFactory.java
@@ -24,6 +24,6 @@
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
public interface IMergeAggregationExpressionFactory {
- ILogicalExpression createMergeAggregation(LogicalVariable originalAggVariable, ILogicalExpression expr, IOptimizationContext env)
- throws AlgebricksException;
+ ILogicalExpression createMergeAggregation(LogicalVariable originalAggVariable, ILogicalExpression expr,
+ IOptimizationContext env) throws AlgebricksException;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/functions/AlgebricksBuiltinFunctions.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/functions/AlgebricksBuiltinFunctions.java
index 07e4f98..2da7cf3 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/functions/AlgebricksBuiltinFunctions.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/functions/AlgebricksBuiltinFunctions.java
@@ -43,8 +43,8 @@
// booleans
public final static FunctionIdentifier NOT = new FunctionIdentifier(ALGEBRICKS_NS, "not", 1);
- public final static FunctionIdentifier AND = new FunctionIdentifier(ALGEBRICKS_NS, "and",
- FunctionIdentifier.VARARGS);
+ public final static FunctionIdentifier AND =
+ new FunctionIdentifier(ALGEBRICKS_NS, "and", FunctionIdentifier.VARARGS);
public final static FunctionIdentifier OR = new FunctionIdentifier(ALGEBRICKS_NS, "or", FunctionIdentifier.VARARGS);
// numerics
@@ -56,7 +56,8 @@
// nulls
public final static FunctionIdentifier IS_NULL = new FunctionIdentifier(ALGEBRICKS_NS, "is-null", 1);
- private static final Map<FunctionIdentifier, ComparisonKind> comparisonFunctions = new HashMap<FunctionIdentifier, ComparisonKind>();
+ private static final Map<FunctionIdentifier, ComparisonKind> comparisonFunctions =
+ new HashMap<FunctionIdentifier, ComparisonKind>();
static {
comparisonFunctions.put(AlgebricksBuiltinFunctions.EQ, ComparisonKind.EQ);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AbstractScanOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AbstractScanOperator.java
index 5d6f40c..c163c9f 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AbstractScanOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AbstractScanOperator.java
@@ -40,7 +40,6 @@
return variables;
}
-
public void setVariables(List<LogicalVariable> variables) {
this.variables = variables;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java
index c58dd67..b4a59a8 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java
@@ -98,13 +98,13 @@
@Override
public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
- IVariableTypeEnvironment env = new NonPropagatingTypeEnvironment(ctx.getExpressionTypeComputer(),
- ctx.getMetadataProvider());
+ IVariableTypeEnvironment env =
+ new NonPropagatingTypeEnvironment(ctx.getExpressionTypeComputer(), ctx.getMetadataProvider());
IVariableTypeEnvironment env2 = ctx.getOutputTypeEnvironment(inputs.get(0).getValue());
int n = variables.size();
for (int i = 0; i < n; i++) {
- Object t = ctx.getExpressionTypeComputer().getType(expressions.get(i).getValue(),
- ctx.getMetadataProvider(), env2);
+ Object t = ctx.getExpressionTypeComputer().getType(expressions.get(i).getValue(), ctx.getMetadataProvider(),
+ env2);
env.setVarType(variables.get(i), t);
}
return env;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java
index ef1760b..861d74c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java
@@ -92,8 +92,8 @@
env.setVarType(variables.get(i), ctx.getExpressionTypeComputer().getType(expressions.get(i).getValue(),
ctx.getMetadataProvider(), env));
if (expressions.get(i).getValue().getExpressionTag() == LogicalExpressionTag.VARIABLE) {
- LogicalVariable var = ((VariableReferenceExpression) expressions.get(i).getValue())
- .getVariableReference();
+ LogicalVariable var =
+ ((VariableReferenceExpression) expressions.get(i).getValue()).getVariableReference();
for (List<LogicalVariable> list : env.getCorrelatedMissableVariableLists()) {
if (list.contains(var)) {
list.add(variables.get(i));
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/GroupByOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/GroupByOperator.java
index aa694b8..365d77e 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/GroupByOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/GroupByOperator.java
@@ -296,9 +296,8 @@
// The groupAll flag can only be set if group by columns are empty.
private void checkGroupAll(boolean groupAll) {
if (groupAll && !gByList.isEmpty()) {
- throw new IllegalStateException(
- "Conflicting parameters for GROUP BY: there should be no GROUP BY keys "
- + "when the GROUP ALL flag is set to true");
+ throw new IllegalStateException("Conflicting parameters for GROUP BY: there should be no GROUP BY keys "
+ + "when the GROUP ALL flag is set to true");
}
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IndexInsertDeleteUpsertOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IndexInsertDeleteUpsertOperator.java
index 02765f1..31a1294 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IndexInsertDeleteUpsertOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IndexInsertDeleteUpsertOperator.java
@@ -181,8 +181,8 @@
return prevAdditionalFilteringExpression;
}
- public void
- setBeforeOpAdditionalFilteringExpression(Mutable<ILogicalExpression> prevAdditionalFilteringExpression) {
+ public void setBeforeOpAdditionalFilteringExpression(
+ Mutable<ILogicalExpression> prevAdditionalFilteringExpression) {
this.prevAdditionalFilteringExpression = prevAdditionalFilteringExpression;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IntersectOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IntersectOperator.java
index 0595ff8..8a06ec4 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IntersectOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IntersectOperator.java
@@ -155,8 +155,8 @@
Object expectedType = expected.getVarType(expectedVariables.get(i));
Object actualType = actual.getVarType(actualVariables.get(i));
if (!expectedType.equals(actualType)) {
- AlgebricksConfig.ALGEBRICKS_LOGGER.warn(
- "Type of two variables are not equal." + expectedVariables.get(i) + " is of type: "
+ AlgebricksConfig.ALGEBRICKS_LOGGER
+ .warn("Type of two variables are not equal." + expectedVariables.get(i) + " is of type: "
+ expectedType + actualVariables.get(i) + " is of type: " + actualType);
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/LeftOuterJoinOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/LeftOuterJoinOperator.java
index 2ae27f1..797c5eb 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/LeftOuterJoinOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/LeftOuterJoinOperator.java
@@ -64,9 +64,9 @@
for (int i = 0; i < n; i++) {
envPointers[i] = new OpRefTypeEnvPointer(inputs.get(i), ctx);
}
- PropagatingTypeEnvironment env = new PropagatingTypeEnvironment(ctx.getExpressionTypeComputer(),
- ctx.getMissableTypeComputer(), ctx.getMetadataProvider(), TypePropagationPolicy.LEFT_OUTER,
- envPointers);
+ PropagatingTypeEnvironment env =
+ new PropagatingTypeEnvironment(ctx.getExpressionTypeComputer(), ctx.getMissableTypeComputer(),
+ ctx.getMetadataProvider(), TypePropagationPolicy.LEFT_OUTER, envPointers);
List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
VariableUtilities.getLiveVariables(inputs.get(1).getValue(), liveVars); // live variables from outer branch can be null together
env.getCorrelatedMissableVariableLists().add(liveVars);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/MaterializeOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/MaterializeOperator.java
index 6c10d60..9e35885 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/MaterializeOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/MaterializeOperator.java
@@ -43,7 +43,8 @@
}
@Override
- public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform) throws AlgebricksException {
+ public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/RunningAggregateOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/RunningAggregateOperator.java
index c2e244b..ef16613 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/RunningAggregateOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/RunningAggregateOperator.java
@@ -72,10 +72,8 @@
IVariableTypeEnvironment env = createPropagatingAllInputsTypeEnvironment(ctx);
int n = variables.size();
for (int i = 0; i < n; i++) {
- env.setVarType(
- variables.get(i),
- ctx.getExpressionTypeComputer().getType(expressions.get(i).getValue(), ctx.getMetadataProvider(),
- env));
+ env.setVarType(variables.get(i), ctx.getExpressionTypeComputer().getType(expressions.get(i).getValue(),
+ ctx.getMetadataProvider(), env));
}
return env;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/ScriptOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/ScriptOperator.java
index b04b28c..6fb767c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/ScriptOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/ScriptOperator.java
@@ -99,8 +99,8 @@
@Override
public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
- IVariableTypeEnvironment env = new NonPropagatingTypeEnvironment(ctx.getExpressionTypeComputer(),
- ctx.getMetadataProvider());
+ IVariableTypeEnvironment env =
+ new NonPropagatingTypeEnvironment(ctx.getExpressionTypeComputer(), ctx.getMetadataProvider());
for (Pair<LogicalVariable, Object> p : scriptDesc.getVarTypePairs()) {
env.setVarType(p.first, p.second);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/TokenizeOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/TokenizeOperator.java
index c69ead7..d61ad07 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/TokenizeOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/TokenizeOperator.java
@@ -50,12 +50,9 @@
private final List<Object> tokenizeVarTypes;
private List<Mutable<ILogicalExpression>> additionalFilteringExpressions;
- public TokenizeOperator(IDataSourceIndex<?, ?> dataSourceIndex,
- List<Mutable<ILogicalExpression>> primaryKeyExprs,
- List<Mutable<ILogicalExpression>> secondaryKeyExprs,
- List<LogicalVariable> tokenizeVars,
- Mutable<ILogicalExpression> filterExpr, Kind operation,
- boolean bulkload, boolean isPartitioned,
+ public TokenizeOperator(IDataSourceIndex<?, ?> dataSourceIndex, List<Mutable<ILogicalExpression>> primaryKeyExprs,
+ List<Mutable<ILogicalExpression>> secondaryKeyExprs, List<LogicalVariable> tokenizeVars,
+ Mutable<ILogicalExpression> filterExpr, Kind operation, boolean bulkload, boolean isPartitioned,
List<Object> tokenizeVarTypes) {
this.dataSourceIndex = dataSourceIndex;
this.primaryKeyExprs = primaryKeyExprs;
@@ -76,9 +73,7 @@
}
@Override
- public boolean acceptExpressionTransform(
- ILogicalExpressionReferenceTransform visitor)
- throws AlgebricksException {
+ public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform visitor) throws AlgebricksException {
boolean b = false;
for (int i = 0; i < primaryKeyExprs.size(); i++) {
if (visitor.transform(primaryKeyExprs.get(i))) {
@@ -94,8 +89,7 @@
}
@Override
- public <R, T> R accept(ILogicalOperatorVisitor<R, T> visitor, T arg)
- throws AlgebricksException {
+ public <R, T> R accept(ILogicalOperatorVisitor<R, T> visitor, T arg) throws AlgebricksException {
return visitor.visitTokenizeOperator(this, arg);
}
@@ -109,8 +103,8 @@
return new VariablePropagationPolicy() {
@Override
- public void propagateVariables(IOperatorSchema target,
- IOperatorSchema... sources) throws AlgebricksException {
+ public void propagateVariables(IOperatorSchema target, IOperatorSchema... sources)
+ throws AlgebricksException {
target.addAllVariables(sources[0]);
for (LogicalVariable v : tokenizeVars) {
target.addVariable(v);
@@ -126,8 +120,7 @@
}
@Override
- public IVariableTypeEnvironment computeOutputTypeEnvironment(
- ITypingContext ctx) throws AlgebricksException {
+ public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
IVariableTypeEnvironment env = createPropagatingAllInputsTypeEnvironment(ctx);
// If the secondary index is not length-partitioned, create one new
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/UpdateOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/UpdateOperator.java
index b1ca744..3a0068d 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/UpdateOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/UpdateOperator.java
@@ -35,7 +35,8 @@
}
@Override
- public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform) throws AlgebricksException {
+ public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform)
+ throws AlgebricksException {
// TODO Auto-generated method stub
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
index 0c53685..fb1bcec 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
@@ -77,7 +77,8 @@
public class IsomorphismOperatorVisitor implements ILogicalOperatorVisitor<Boolean, ILogicalOperator> {
- private final Map<LogicalVariable, LogicalVariable> variableMapping = new HashMap<LogicalVariable, LogicalVariable>();
+ private final Map<LogicalVariable, LogicalVariable> variableMapping =
+ new HashMap<LogicalVariable, LogicalVariable>();
public IsomorphismOperatorVisitor() {
}
@@ -89,9 +90,9 @@
return Boolean.FALSE;
}
AggregateOperator aggOpArg = (AggregateOperator) copyAndSubstituteVar(op, arg);
- boolean isomorphic = VariableUtilities.varListEqualUnordered(
- getPairList(op.getVariables(), op.getExpressions()),
- getPairList(aggOpArg.getVariables(), aggOpArg.getExpressions()));
+ boolean isomorphic =
+ VariableUtilities.varListEqualUnordered(getPairList(op.getVariables(), op.getExpressions()),
+ getPairList(aggOpArg.getVariables(), aggOpArg.getExpressions()));
return isomorphic;
}
@@ -103,9 +104,9 @@
return Boolean.FALSE;
}
RunningAggregateOperator aggOpArg = (RunningAggregateOperator) copyAndSubstituteVar(op, arg);
- boolean isomorphic = VariableUtilities.varListEqualUnordered(
- getPairList(op.getVariables(), op.getExpressions()),
- getPairList(aggOpArg.getVariables(), aggOpArg.getExpressions()));
+ boolean isomorphic =
+ VariableUtilities.varListEqualUnordered(getPairList(op.getVariables(), op.getExpressions()),
+ getPairList(aggOpArg.getVariables(), aggOpArg.getExpressions()));
return isomorphic;
}
@@ -142,8 +143,10 @@
GroupByOperator gbyOpArg = (GroupByOperator) copyAndSubstituteVar(op, arg);
List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> keyListsArg = gbyOpArg.getGroupByList();
- List<Pair<LogicalVariable, ILogicalExpression>> listLeft = new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
- List<Pair<LogicalVariable, ILogicalExpression>> listRight = new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
+ List<Pair<LogicalVariable, ILogicalExpression>> listLeft =
+ new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
+ List<Pair<LogicalVariable, ILogicalExpression>> listRight =
+ new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : keyLists) {
listLeft.add(new Pair<LogicalVariable, ILogicalExpression>(pair.first, pair.second.getValue()));
@@ -249,9 +252,9 @@
return Boolean.FALSE;
}
AssignOperator assignOpArg = (AssignOperator) copyAndSubstituteVar(op, arg);
- boolean isomorphic = VariableUtilities.varListEqualUnordered(
- getPairList(op.getVariables(), op.getExpressions()),
- getPairList(assignOpArg.getVariables(), assignOpArg.getExpressions()));
+ boolean isomorphic =
+ VariableUtilities.varListEqualUnordered(getPairList(op.getVariables(), op.getExpressions()),
+ getPairList(assignOpArg.getVariables(), assignOpArg.getExpressions()));
return isomorphic;
}
@@ -659,7 +662,8 @@
public List<Pair<LogicalVariable, ILogicalExpression>> getPairList(List<LogicalVariable> vars,
List<Mutable<ILogicalExpression>> exprs) throws AlgebricksException {
- List<Pair<LogicalVariable, ILogicalExpression>> list = new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
+ List<Pair<LogicalVariable, ILogicalExpression>> list =
+ new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
if (vars.size() != exprs.size()) {
throw new AlgebricksException("variable list size does not equal to expression list size ");
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
index a75e7d6..74afdf5 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
@@ -203,8 +203,8 @@
private List<Pair<IOrder, Mutable<ILogicalExpression>>> deepCopyOrderExpressionReferencePairList(
List<Pair<IOrder, Mutable<ILogicalExpression>>> list) throws AlgebricksException {
- ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>> listCopy = new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>(
- list.size());
+ ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>> listCopy =
+ new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>(list.size());
for (Pair<IOrder, Mutable<ILogicalExpression>> pair : list) {
listCopy.add(new Pair<OrderOperator.IOrder, Mutable<ILogicalExpression>>(deepCopyOrder(pair.first),
exprDeepCopyVisitor.deepCopyExpressionReference(pair.second)));
@@ -248,8 +248,8 @@
private List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> deepCopyVariableExpressionReferencePairList(
List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> list) throws AlgebricksException {
- List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> listCopy = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>(
- list.size());
+ List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> listCopy =
+ new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>(list.size());
for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : list) {
listCopy.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(deepCopyVariable(pair.first),
exprDeepCopyVisitor.deepCopyExpressionReference(pair.second)));
@@ -317,8 +317,8 @@
@Override
public ILogicalOperator visitDataScanOperator(DataSourceScanOperator op, ILogicalOperator arg)
throws AlgebricksException {
- DataSourceScanOperator opCopy = new DataSourceScanOperator(deepCopyVariableList(op.getVariables()),
- op.getDataSource());
+ DataSourceScanOperator opCopy =
+ new DataSourceScanOperator(deepCopyVariableList(op.getVariables()), op.getDataSource());
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@@ -326,8 +326,8 @@
@Override
public ILogicalOperator visitDistinctOperator(DistinctOperator op, ILogicalOperator arg)
throws AlgebricksException {
- DistinctOperator opCopy = new DistinctOperator(
- exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()));
+ DistinctOperator opCopy =
+ new DistinctOperator(exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()));
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@@ -349,10 +349,10 @@
@Override
public ILogicalOperator visitGroupByOperator(GroupByOperator op, ILogicalOperator arg) throws AlgebricksException {
- List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByListCopy = deepCopyVariableExpressionReferencePairList(
- op.getGroupByList());
- List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorListCopy = deepCopyVariableExpressionReferencePairList(
- op.getDecorList());
+ List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByListCopy =
+ deepCopyVariableExpressionReferencePairList(op.getGroupByList());
+ List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorListCopy =
+ deepCopyVariableExpressionReferencePairList(op.getDecorList());
List<ILogicalPlan> nestedPlansCopy = new ArrayList<ILogicalPlan>();
GroupByOperator opCopy = new GroupByOperator(groupByListCopy, decorListCopy, nestedPlansCopy, op.isGroupAll());
@@ -364,10 +364,10 @@
@Override
public ILogicalOperator visitInnerJoinOperator(InnerJoinOperator op, ILogicalOperator arg)
throws AlgebricksException {
- InnerJoinOperator opCopy = new InnerJoinOperator(
- exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
- deepCopyOperatorReference(op.getInputs().get(0), arg),
- deepCopyOperatorReference(op.getInputs().get(1), arg));
+ InnerJoinOperator opCopy =
+ new InnerJoinOperator(exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
+ deepCopyOperatorReference(op.getInputs().get(0), arg),
+ deepCopyOperatorReference(op.getInputs().get(1), arg));
copyAnnotations(op, opCopy);
opCopy.setExecutionMode(op.getExecutionMode());
return opCopy;
@@ -376,10 +376,10 @@
@Override
public ILogicalOperator visitLeftOuterJoinOperator(LeftOuterJoinOperator op, ILogicalOperator arg)
throws AlgebricksException {
- LeftOuterJoinOperator opCopy = new LeftOuterJoinOperator(
- exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
- deepCopyOperatorReference(op.getInputs().get(0), arg),
- deepCopyOperatorReference(op.getInputs().get(1), arg));
+ LeftOuterJoinOperator opCopy =
+ new LeftOuterJoinOperator(exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
+ deepCopyOperatorReference(op.getInputs().get(0), arg),
+ deepCopyOperatorReference(op.getInputs().get(1), arg));
copyAnnotations(op, opCopy);
opCopy.setExecutionMode(op.getExecutionMode());
return opCopy;
@@ -396,8 +396,8 @@
@Override
public ILogicalOperator visitNestedTupleSourceOperator(NestedTupleSourceOperator op, ILogicalOperator arg)
throws AlgebricksException {
- Mutable<ILogicalOperator> dataSourceReference = arg == null ? op.getDataSourceReference()
- : new MutableObject<>(arg);
+ Mutable<ILogicalOperator> dataSourceReference =
+ arg == null ? op.getDataSourceReference() : new MutableObject<>(arg);
NestedTupleSourceOperator opCopy = new NestedTupleSourceOperator(dataSourceReference);
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
@@ -495,8 +495,8 @@
int index = 0;
for (Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple : op.getVariableMappings()) {
LogicalVariable producedVar = deepCopyVariable(triple.third);
- Triple<LogicalVariable, LogicalVariable, LogicalVariable> copiedTriple = new Triple<>(
- liveVarsInLeftInput.get(index), liveVarsInRightInput.get(index), producedVar);
+ Triple<LogicalVariable, LogicalVariable, LogicalVariable> copiedTriple =
+ new Triple<>(liveVarsInLeftInput.get(index), liveVarsInRightInput.get(index), producedVar);
copiedTriples.add(copiedTriple);
++index;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java
index 7543e5f..600714b 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java
@@ -369,8 +369,8 @@
return newObjs;
}
- private List<Pair<IOrder, Mutable<ILogicalExpression>>>
- deepCopyOrderAndExpression(List<Pair<IOrder, Mutable<ILogicalExpression>>> ordersAndExprs) {
+ private List<Pair<IOrder, Mutable<ILogicalExpression>>> deepCopyOrderAndExpression(
+ List<Pair<IOrder, Mutable<ILogicalExpression>>> ordersAndExprs) {
List<Pair<IOrder, Mutable<ILogicalExpression>>> newOrdersAndExprs = new ArrayList<>();
for (Pair<IOrder, Mutable<ILogicalExpression>> pair : ordersAndExprs) {
newOrdersAndExprs.add(new Pair<>(pair.first, deepCopyExpressionRef(pair.second)));
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java
index 7221e81..69fe746 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java
@@ -139,7 +139,7 @@
public static void substituteVariables(ILogicalOperator op,
List<Pair<LogicalVariable, LogicalVariable>> oldVarNewVarMapHistory, ITypingContext ctx)
- throws AlgebricksException {
+ throws AlgebricksException {
for (Pair<LogicalVariable, LogicalVariable> entry : oldVarNewVarMapHistory) {
VariableUtilities.substituteVariables(op, entry.first, entry.second, ctx);
}
@@ -165,8 +165,8 @@
public static void substituteVariables(ILogicalOperator op, LogicalVariable v1, LogicalVariable v2,
boolean goThroughNts, ITypingContext ctx) throws AlgebricksException {
- ILogicalOperatorVisitor<Void, Pair<LogicalVariable, LogicalVariable>> visitor = new SubstituteVariableVisitor(
- goThroughNts, ctx);
+ ILogicalOperatorVisitor<Void, Pair<LogicalVariable, LogicalVariable>> visitor =
+ new SubstituteVariableVisitor(goThroughNts, ctx);
op.accept(visitor, new Pair<LogicalVariable, LogicalVariable>(v1, v2));
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractExchangePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractExchangePOperator.java
index 799a6af..6f8d5e9 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractExchangePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractExchangePOperator.java
@@ -33,8 +33,8 @@
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
throws AlgebricksException {
- Pair<IConnectorDescriptor, TargetConstraint> connPair = createConnectorDescriptor(builder.getJobSpec(), op,
- opSchema, context);
+ Pair<IConnectorDescriptor, TargetConstraint> connPair =
+ createConnectorDescriptor(builder.getJobSpec(), op, opSchema, context);
builder.contributeConnectorWithTargetConstraint(op, connPair.first, connPair.second);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractHashJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractHashJoinPOperator.java
index f6a1bc4..6a81005 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractHashJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractHashJoinPOperator.java
@@ -96,10 +96,10 @@
switch (partitioningType) {
case PAIRWISE:
pp1 = new UnorderedPartitionedProperty(new ListSet<>(keysLeftBranch),
- context.getComputationNodeDomain());
+ context.getComputationNodeDomain());
pp2 = new UnorderedPartitionedProperty(new ListSet<>(keysRightBranch),
- context.getComputationNodeDomain());
- break;
+ context.getComputationNodeDomain());
+ break;
case BROADCAST:
pp1 = new RandomPartitioningProperty(context.getComputationNodeDomain());
pp2 = new BroadcastPartitioningProperty(context.getComputationNodeDomain());
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractJoinPOperator.java
index 4afcbc8..aea9b3e 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractJoinPOperator.java
@@ -25,7 +25,8 @@
public abstract class AbstractJoinPOperator extends AbstractPhysicalOperator {
public enum JoinPartitioningType {
- PAIRWISE, BROADCAST
+ PAIRWISE,
+ BROADCAST
}
protected final JoinKind kind;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java
index 0fb667a..43cde22 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java
@@ -72,8 +72,8 @@
}
protected PhysicalRequirements emptyUnaryRequirements() {
- StructuralPropertiesVector[] req = new StructuralPropertiesVector[] {
- StructuralPropertiesVector.EMPTY_PROPERTIES_VECTOR };
+ StructuralPropertiesVector[] req =
+ new StructuralPropertiesVector[] { StructuralPropertiesVector.EMPTY_PROPERTIES_VECTOR };
return new PhysicalRequirements(req, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
@@ -117,7 +117,7 @@
protected AlgebricksPipeline[] compileSubplans(IOperatorSchema outerPlanSchema,
AbstractOperatorWithNestedPlans npOp, IOperatorSchema opSchema, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
AlgebricksPipeline[] subplans = new AlgebricksPipeline[npOp.getNestedPlans().size()];
PlanCompiler pc = new PlanCompiler(context);
int i = 0;
@@ -129,7 +129,7 @@
private AlgebricksPipeline buildPipelineWithProjection(ILogicalPlan p, IOperatorSchema outerPlanSchema,
AbstractOperatorWithNestedPlans npOp, IOperatorSchema opSchema, PlanCompiler pc)
- throws AlgebricksException {
+ throws AlgebricksException {
if (p.getRoots().size() > 1) {
throw new NotImplementedException("Nested plans with several roots are not supported.");
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPreclusteredGroupByPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPreclusteredGroupByPOperator.java
index 75970ac..64e50ed 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPreclusteredGroupByPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPreclusteredGroupByPOperator.java
@@ -155,7 +155,6 @@
Set<LogicalVariable> gbvars = new ListSet<>(columnList);
LocalGroupingProperty groupProp = new LocalGroupingProperty(gbvars, new ArrayList<>(columnList));
-
boolean goon = true;
for (ILogicalPlan p : gby.getNestedPlans()) {
// try to propagate secondary order requirements from nested
@@ -232,8 +231,7 @@
tl.add(((VariableReferenceExpression) decorPair.second.getValue()).getVariableReference());
fdList.add(new FunctionalDependency(hd, tl));
}
- if (allOk && PropertiesUtil.matchLocalProperties(localProps, props,
- new HashMap<>(), fdList)) {
+ if (allOk && PropertiesUtil.matchLocalProperties(localProps, props, new HashMap<>(), fdList)) {
localProps = props;
}
}
@@ -242,8 +240,7 @@
IPartitioningProperty pp = null;
AbstractLogicalOperator aop = (AbstractLogicalOperator) op;
if (aop.getExecutionMode() == ExecutionMode.PARTITIONED) {
- pp = new UnorderedPartitionedProperty(new ListSet<>(columnList),
- context.getComputationNodeDomain());
+ pp = new UnorderedPartitionedProperty(new ListSet<>(columnList), context.getComputationNodeDomain());
}
pv[0] = new StructuralPropertiesVector(pp, localProps);
return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java
index f16f49a..147d5cc 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java
@@ -60,11 +60,11 @@
AggregateOperator aggOp = (AggregateOperator) op;
ILogicalOperator op2 = op.getInputs().get(0).getValue();
if (aggOp.getExecutionMode() != AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) {
- deliveredProperties = new StructuralPropertiesVector(op2.getDeliveredPhysicalProperties()
- .getPartitioningProperty(), new ArrayList<>());
+ deliveredProperties = new StructuralPropertiesVector(
+ op2.getDeliveredPhysicalProperties().getPartitioningProperty(), new ArrayList<>());
} else {
- deliveredProperties = new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED,
- new ArrayList<>());
+ deliveredProperties =
+ new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED, new ArrayList<>());
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AssignPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AssignPOperator.java
index 5aed63e..995f6e0 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AssignPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AssignPOperator.java
@@ -88,14 +88,14 @@
// TODO push projections into the operator
int[] projectionList = JobGenHelper.projectAllVariables(opSchema);
- AssignRuntimeFactory runtime = new AssignRuntimeFactory(outColumns, evalFactories, projectionList,
- flushFramesRapidly);
+ AssignRuntimeFactory runtime =
+ new AssignRuntimeFactory(outColumns, evalFactories, projectionList, flushFramesRapidly);
// contribute one Asterix framewriter
RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
if (cardinalityConstraint > 0) {
- AlgebricksCountPartitionConstraint countConstraint = new AlgebricksCountPartitionConstraint(
- cardinalityConstraint);
+ AlgebricksCountPartitionConstraint countConstraint =
+ new AlgebricksCountPartitionConstraint(cardinalityConstraint);
builder.contributeMicroOperator(assign, runtime, recDesc, countConstraint);
} else {
builder.contributeMicroOperator(assign, runtime, recDesc);
@@ -119,7 +119,6 @@
this.cardinalityConstraint = cardinality;
}
-
@Override
public boolean expensiveThanMaterialization() {
return false;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/BulkloadPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/BulkloadPOperator.java
index dda5456..2204637 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/BulkloadPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/BulkloadPOperator.java
@@ -76,8 +76,8 @@
List<LogicalVariable> scanVariables = new ArrayList<>();
scanVariables.addAll(primaryKeys);
scanVariables.add(new LogicalVariable(-1));
- IPhysicalPropertiesVector physicalProps = dataSource.getPropertiesProvider()
- .computePropertiesVector(scanVariables);
+ IPhysicalPropertiesVector physicalProps =
+ dataSource.getPropertiesProvider().computePropertiesVector(scanVariables);
StructuralPropertiesVector spv = new StructuralPropertiesVector(physicalProps.getPartitioningProperty(),
physicalProps.getLocalProperties());
return new PhysicalRequirements(new IPhysicalPropertiesVector[] { spv },
@@ -95,7 +95,7 @@
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException {
+ throws AlgebricksException {
InsertDeleteUpsertOperator insertDeleteOp = (InsertDeleteUpsertOperator) op;
assert insertDeleteOp.getOperation() == Kind.INSERT;
assert insertDeleteOp.isBulkload();
@@ -104,9 +104,9 @@
JobSpecification spec = builder.getJobSpec();
RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
- Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = mp.getInsertRuntime(dataSource,
- propagatedSchema, typeEnv, primaryKeys, payload, additionalFilteringKeys, additionalNonFilterVars,
- inputDesc, context, spec, true);
+ Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints =
+ mp.getInsertRuntime(dataSource, propagatedSchema, typeEnv, primaryKeys, payload,
+ additionalFilteringKeys, additionalNonFilterVars, inputDesc, context, spec, true);
builder.contributeHyracksOperator(insertDeleteOp, runtimeAndConstraints.first);
builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
ILogicalOperator src = insertDeleteOp.getInputs().get(0).getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DataSourceScanPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DataSourceScanPOperator.java
index 4a5ac5a..1421cef 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DataSourceScanPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DataSourceScanPOperator.java
@@ -109,9 +109,9 @@
List<LogicalVariable> vars = scan.getVariables();
List<LogicalVariable> projectVars = scan.getProjectVariables();
- Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = mp.getScannerRuntime(dataSource, vars,
- projectVars, scan.isProjectPushed(), scan.getMinFilterVars(), scan.getMaxFilterVars(), opSchema,
- typeEnv, context, builder.getJobSpec(), implConfig);
+ Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p =
+ mp.getScannerRuntime(dataSource, vars, projectVars, scan.isProjectPushed(), scan.getMinFilterVars(),
+ scan.getMaxFilterVars(), opSchema, typeEnv, context, builder.getJobSpec(), implConfig);
builder.contributeHyracksOperator(scan, p.first);
if (p.second != null) {
builder.contributeAlgebricksPartitionConstraint(p.first, p.second);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java
index b3e8385..178f2a1 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java
@@ -100,11 +100,11 @@
RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
- IPrinterFactory[] pf = JobGenHelper.mkPrinterFactories(inputSchemas[0], context.getTypeEnvironment(op),
- context, columns);
+ IPrinterFactory[] pf =
+ JobGenHelper.mkPrinterFactories(inputSchemas[0], context.getTypeEnvironment(op), context, columns);
- Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = mp.getResultHandleRuntime(
- resultOp.getDataSink(), columns, pf, inputDesc, true, spec);
+ Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints =
+ mp.getResultHandleRuntime(resultOp.getDataSink(), columns, pf, inputDesc, true, spec);
builder.contributeHyracksOperator(resultOp, runtimeAndConstraints.first);
ILogicalOperator src = resultOp.getInputs().get(0).getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ExternalGroupByPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ExternalGroupByPOperator.java
index eecd066..5ee967d 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ExternalGroupByPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ExternalGroupByPOperator.java
@@ -148,7 +148,7 @@
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException {
+ throws AlgebricksException {
List<LogicalVariable> gbyCols = getGbyColumns();
int keys[] = JobGenHelper.variablesToFieldIndexes(gbyCols, inputSchemas[0]);
GroupByOperator gby = (GroupByOperator) op;
@@ -221,20 +221,20 @@
}
IOperatorDescriptorRegistry spec = builder.getJobSpec();
- IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(gbyCols,
- aggOpInputEnv, context);
- RecordDescriptor recordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema,
- context);
- IBinaryHashFunctionFamily[] hashFunctionFactories = JobGenHelper.variablesToBinaryHashFunctionFamilies(gbyCols,
- aggOpInputEnv, context);
+ IBinaryComparatorFactory[] comparatorFactories =
+ JobGenHelper.variablesToAscBinaryComparatorFactories(gbyCols, aggOpInputEnv, context);
+ RecordDescriptor recordDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
+ IBinaryHashFunctionFamily[] hashFunctionFactories =
+ JobGenHelper.variablesToBinaryHashFunctionFamilies(gbyCols, aggOpInputEnv, context);
ISerializedAggregateEvaluatorFactory[] merges = new ISerializedAggregateEvaluatorFactory[n];
List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
IOperatorSchema[] localInputSchemas = new IOperatorSchema[1];
localInputSchemas[0] = new OperatorSchemaImpl();
for (i = 0; i < n; i++) {
- AggregateFunctionCallExpression aggFun = (AggregateFunctionCallExpression) aggOp.getMergeExpressions()
- .get(i).getValue();
+ AggregateFunctionCallExpression aggFun =
+ (AggregateFunctionCallExpression) aggOp.getMergeExpressions().get(i).getValue();
aggFun.getUsedVariables(usedVars);
}
i = 0;
@@ -248,16 +248,16 @@
localInputSchemas[0].addVariable(usedVar);
}
for (i = 0; i < n; i++) {
- AggregateFunctionCallExpression mergeFun = (AggregateFunctionCallExpression) aggOp.getMergeExpressions()
- .get(i).getValue();
+ AggregateFunctionCallExpression mergeFun =
+ (AggregateFunctionCallExpression) aggOp.getMergeExpressions().get(i).getValue();
merges[i] = expressionRuntimeProvider.createSerializableAggregateFunctionFactory(mergeFun, aggOpInputEnv,
localInputSchemas, context);
}
IAggregatorDescriptorFactory aggregatorFactory = new SerializableAggregatorDescriptorFactory(aff);
IAggregatorDescriptorFactory mergeFactory = new SerializableAggregatorDescriptorFactory(merges);
- INormalizedKeyComputerFactory normalizedKeyFactory = JobGenHelper
- .variablesToAscNormalizedKeyComputerFactory(gbyCols, aggOpInputEnv, context);
+ INormalizedKeyComputerFactory normalizedKeyFactory =
+ JobGenHelper.variablesToAscNormalizedKeyComputerFactory(gbyCols, aggOpInputEnv, context);
// Calculates the hash table size (# of unique hash values) based on the budget and a tuple size.
int memoryBudgetInBytes = context.getFrameSize() * frameLimit;
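[Editorial sketch, not part of the change: the hunk above ends where the external group-by sizes its hash table from the operator's frame budget. A minimal standalone version of that kind of calculation follows; the per-entry overhead constant and method names are assumptions for illustration, not the operator's actual fields.]

    // Sketch only: estimate how many unique hash entries fit in a frame budget.
    // ENTRY_OVERHEAD_BYTES is an assumed per-entry cost (slot + header), not the real constant.
    public final class HashTableSizing {
        private static final int ENTRY_OVERHEAD_BYTES = 16; // assumption

        public static int estimateCardinality(int frameSizeBytes, int frameLimit, int avgTupleSizeBytes) {
            long budgetInBytes = (long) frameSizeBytes * frameLimit; // mirrors frameSize * frameLimit above
            return (int) Math.max(1, budgetInBytes / (avgTupleSizeBytes + ENTRY_OVERHEAD_BYTES));
        }

        public static void main(String[] args) {
            // 32 KB frames, 128 frames, ~64-byte tuples -> roughly 52k hash entries
            System.out.println(estimateCardinality(32 * 1024, 128, 64));
        }
    }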
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
index 17322b6..c5ce871 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
@@ -82,8 +82,8 @@
@Override
public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
- IPartitioningProperty p = new UnorderedPartitionedProperty(new ListSet<LogicalVariable>(partitionFields),
- domain);
+ IPartitioningProperty p =
+ new UnorderedPartitionedProperty(new ListSet<LogicalVariable>(partitionFields), domain);
AbstractLogicalOperator op2 = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
List<ILocalStructuralProperty> op2Locals = op2.getDeliveredPhysicalProperties().getLocalProperties();
List<ILocalStructuralProperty> locals = new ArrayList<ILocalStructuralProperty>();
@@ -108,8 +108,8 @@
columns.add(new OrderColumn(var, oc.getOrder()));
}
orderProps.add(new LocalOrderProperty(columns));
- StructuralPropertiesVector[] r = new StructuralPropertiesVector[] { new StructuralPropertiesVector(null,
- orderProps) };
+ StructuralPropertiesVector[] r =
+ new StructuralPropertiesVector[] { new StructuralPropertiesVector(null, orderProps) };
return new PhysicalRequirements(r, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
@@ -155,8 +155,8 @@
j++;
}
- IConnectorDescriptor conn = new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields,
- comparatorFactories, nkcf);
+ IConnectorDescriptor conn =
+ new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields, comparatorFactories, nkcf);
return new Pair<IConnectorDescriptor, TargetConstraint>(conn, null);
}
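[Editorial sketch: the connector built above (MToNPartitioningMergingConnectorDescriptor) hash-partitions M sorted producer streams to N consumers and merges per-producer runs on the consumer side so local sort order survives the shuffle. A self-contained toy version of that idea, with all names invented here:]

    import java.util.*;

    // Sketch only: partition sorted runs by hash, then k-way merge per consumer.
    public final class PartitionMerge {
        static int partition(int key, int numConsumers) {
            return Math.floorMod(Integer.hashCode(key), numConsumers);
        }

        public static void main(String[] args) {
            List<List<Integer>> producers = List.of(List.of(1, 4, 7), List.of(2, 5, 8), List.of(3, 6, 9));
            int numConsumers = 2;
            // route each sorted producer run to its consumer, keeping runs separate
            List<List<List<Integer>>> inbox = new ArrayList<>();
            for (int c = 0; c < numConsumers; c++) {
                List<List<Integer>> runs = new ArrayList<>();
                for (int p = 0; p < producers.size(); p++) runs.add(new ArrayList<>());
                inbox.add(runs);
            }
            for (int p = 0; p < producers.size(); p++) {
                for (int v : producers.get(p)) {
                    inbox.get(partition(v, numConsumers)).get(p).add(v);
                }
            }
            // each consumer k-way-merges its per-producer runs with a priority queue
            for (int c = 0; c < numConsumers; c++) {
                PriorityQueue<int[]> pq = new PriorityQueue<>(Comparator.comparingInt(a -> a[0])); // [value, run, idx]
                for (int p = 0; p < producers.size(); p++) {
                    List<Integer> run = inbox.get(c).get(p);
                    if (!run.isEmpty()) pq.add(new int[] { run.get(0), p, 0 });
                }
                List<Integer> merged = new ArrayList<>();
                while (!pq.isEmpty()) {
                    int[] top = pq.poll();
                    merged.add(top[0]);
                    List<Integer> run = inbox.get(c).get(top[1]);
                    if (top[2] + 1 < run.size()) pq.add(new int[] { run.get(top[2] + 1), top[1], top[2] + 1 });
                }
                System.out.println("consumer " + c + ": " + merged); // each consumer's output stays sorted
            }
        }
    }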
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
index 83591ee..301b8f1 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
@@ -116,10 +116,10 @@
int[] keysLeft = JobGenHelper.variablesToFieldIndexes(keysLeftBranch, inputSchemas[0]);
int[] keysRight = JobGenHelper.variablesToFieldIndexes(keysRightBranch, inputSchemas[1]);
IVariableTypeEnvironment env = context.getTypeEnvironment(op);
- IBinaryHashFunctionFactory[] hashFunFactories = JobGenHelper
- .variablesToBinaryHashFunctionFactories(keysLeftBranch, env, context);
- IBinaryHashFunctionFamily[] hashFunFamilies = JobGenHelper.variablesToBinaryHashFunctionFamilies(keysLeftBranch,
- env, context);
+ IBinaryHashFunctionFactory[] hashFunFactories =
+ JobGenHelper.variablesToBinaryHashFunctionFactories(keysLeftBranch, env, context);
+ IBinaryHashFunctionFamily[] hashFunFamilies =
+ JobGenHelper.variablesToBinaryHashFunctionFamilies(keysLeftBranch, env, context);
IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keysLeft.length];
int i = 0;
IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
@@ -128,13 +128,13 @@
comparatorFactories[i++] = bcfp.getBinaryComparatorFactory(t, true);
}
- IPredicateEvaluatorFactoryProvider predEvaluatorFactoryProvider = context
- .getPredicateEvaluatorFactoryProvider();
+ IPredicateEvaluatorFactoryProvider predEvaluatorFactoryProvider =
+ context.getPredicateEvaluatorFactoryProvider();
IPredicateEvaluatorFactory predEvaluatorFactory = predEvaluatorFactoryProvider == null ? null
: predEvaluatorFactoryProvider.getPredicateEvaluatorFactory(keysLeft, keysRight);
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- propagatedSchema, context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
IOperatorDescriptorRegistry spec = builder.getJobSpec();
IOperatorDescriptor opDesc;
boolean optimizedHashJoin = true;
@@ -173,8 +173,8 @@
comparatorFactories, recDescriptor, predEvaluatorFactory, false, null);
break;
case LEFT_OUTER:
- IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[inputSchemas[1]
- .getSize()];
+ IMissingWriterFactory[] nonMatchWriterFactories =
+ new IMissingWriterFactory[inputSchemas[1].getSize()];
for (int j = 0; j < nonMatchWriterFactories.length; j++) {
nonMatchWriterFactories[j] = context.getMissingWriterFactory();
}
@@ -207,8 +207,8 @@
predEvaluatorFactory);
break;
case LEFT_OUTER:
- IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[inputSchemas[1]
- .getSize()];
+ IMissingWriterFactory[] nonMatchWriterFactories =
+ new IMissingWriterFactory[inputSchemas[1].getSize()];
for (int j = 0; j < nonMatchWriterFactories.length; j++) {
nonMatchWriterFactories[j] = context.getMissingWriterFactory();
}
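[Editorial sketch: in the LEFT_OUTER branches above, one missing-value writer is installed per column of the inner schema (inputSchemas[1].getSize()), so unmatched outer tuples are padded rather than dropped. A minimal illustration of that padding, with hypothetical data and null standing in for whatever the missing-writer factory emits:]

    import java.util.*;

    // Sketch only: left-outer hash join pads non-matching outer tuples with
    // one "missing" value per inner column.
    public final class LeftOuterPad {
        public static void main(String[] args) {
            List<int[]> outer = List.of(new int[] { 1, 10 }, new int[] { 2, 20 }); // [key, payload]
            Map<Integer, int[]> inner = Map.of(1, new int[] { 1, 100 });
            int innerWidth = 2; // plays the role of inputSchemas[1].getSize()
            for (int[] left : outer) {
                int[] match = inner.get(left[0]);
                Object[] row = new Object[left.length + innerWidth];
                for (int i = 0; i < left.length; i++) row[i] = left[i];
                for (int j = 0; j < innerWidth; j++) {
                    // the missing-writer factory decides what "missing" looks like; null here
                    row[left.length + j] = (match == null) ? null : match[j];
                }
                System.out.println(Arrays.toString(row)); // [1, 10, 1, 100] then [2, 20, null, null]
            }
        }
    }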
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java
index a1d496d..9c29c53 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java
@@ -87,8 +87,8 @@
int[] keysLeft = JobGenHelper.variablesToFieldIndexes(keysLeftBranch, inputSchemas[0]);
int[] keysRight = JobGenHelper.variablesToFieldIndexes(keysRightBranch, inputSchemas[1]);
IVariableTypeEnvironment env = context.getTypeEnvironment(op);
- IBinaryHashFunctionFactory[] hashFunFactories = JobGenHelper
- .variablesToBinaryHashFunctionFactories(keysLeftBranch, env, context);
+ IBinaryHashFunctionFactory[] hashFunFactories =
+ JobGenHelper.variablesToBinaryHashFunctionFactories(keysLeftBranch, env, context);
IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keysLeft.length];
int i = 0;
IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
@@ -97,13 +97,13 @@
comparatorFactories[i++] = bcfp.getBinaryComparatorFactory(t, true);
}
- IPredicateEvaluatorFactoryProvider predEvaluatorFactoryProvider = context
- .getPredicateEvaluatorFactoryProvider();
+ IPredicateEvaluatorFactoryProvider predEvaluatorFactoryProvider =
+ context.getPredicateEvaluatorFactoryProvider();
IPredicateEvaluatorFactory predEvaluatorFactory = (predEvaluatorFactoryProvider == null ? null
: predEvaluatorFactoryProvider.getPredicateEvaluatorFactory(keysLeft, keysRight));
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- propagatedSchema, context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
IOperatorDescriptorRegistry spec = builder.getJobSpec();
IOperatorDescriptor opDesc = null;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryStableSortPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryStableSortPOperator.java
index 48461b1..d304421 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryStableSortPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryStableSortPOperator.java
@@ -56,7 +56,8 @@
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
throws AlgebricksException {
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
int n = sortColumns.length;
int[] sortFields = new int[n];
IBinaryComparatorFactory[] comps = new IBinaryComparatorFactory[n];
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexBulkloadPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexBulkloadPOperator.java
index 14032a1..fa0fb1a 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexBulkloadPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexBulkloadPOperator.java
@@ -90,8 +90,8 @@
List<LogicalVariable> scanVariables = new ArrayList<>();
scanVariables.addAll(primaryKeys);
scanVariables.add(new LogicalVariable(-1));
- IPhysicalPropertiesVector physicalProps = dataSourceIndex.getDataSource().getPropertiesProvider()
- .computePropertiesVector(scanVariables);
+ IPhysicalPropertiesVector physicalProps =
+ dataSourceIndex.getDataSource().getPropertiesProvider().computePropertiesVector(scanVariables);
List<ILocalStructuralProperty> localProperties = new ArrayList<>();
List<OrderColumn> orderColumns = new ArrayList<OrderColumn>();
// Data needs to be sorted based on the [token, number of token, PK]
@@ -106,8 +106,8 @@
orderColumns.add(new OrderColumn(pkVar, OrderKind.ASC));
}
localProperties.add(new LocalOrderProperty(orderColumns));
- StructuralPropertiesVector spv = new StructuralPropertiesVector(physicalProps.getPartitioningProperty(),
- localProperties);
+ StructuralPropertiesVector spv =
+ new StructuralPropertiesVector(physicalProps.getPartitioningProperty(), localProperties);
return new PhysicalRequirements(new IPhysicalPropertiesVector[] { spv },
IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
@@ -132,9 +132,9 @@
JobSpecification spec = builder.getJobSpec();
RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
- Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = mp.getIndexInsertRuntime(
- dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys, secondaryKeys,
- additionalFilteringKeys, filterExpr, inputDesc, context, spec, true);
+ Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints =
+ mp.getIndexInsertRuntime(dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys,
+ secondaryKeys, additionalFilteringKeys, filterExpr, inputDesc, context, spec, true);
builder.contributeHyracksOperator(indexInsertDeleteOp, runtimeAndConstraints.first);
builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
ILogicalOperator src = indexInsertDeleteOp.getInputs().get(0).getValue();
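[Editorial sketch: per the comment in the first hunk of this file, the bulkload requires its input sorted on [token, number of tokens, PK]. The same requirement expressed as a plain composite comparator; the record and field names here are invented:]

    import java.util.*;

    // Sketch only: the [token, numTokens, pk] ordering the index bulkload expects.
    public final class BulkloadOrder {
        record Entry(String token, int numTokens, long pk) {}

        public static void main(String[] args) {
            Comparator<Entry> order = Comparator.comparing(Entry::token)
                    .thenComparingInt(Entry::numTokens)
                    .thenComparingLong(Entry::pk);
            List<Entry> in = new ArrayList<>(List.of(
                    new Entry("b", 1, 7), new Entry("a", 2, 3), new Entry("a", 1, 9)));
            in.sort(order);
            System.out.println(in); // (a,1,9), (a,2,3), (b,1,7): ready for a sequential index load
        }
    }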
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeleteUpsertPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeleteUpsertPOperator.java
index ce86e58..a66db35 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeleteUpsertPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeleteUpsertPOperator.java
@@ -96,8 +96,8 @@
for (int i = 0; i < numOfAdditionalNonFilteringFields; i++) {
scanVariables.add(new LogicalVariable(-1));
}
- IPhysicalPropertiesVector r = dataSourceIndex.getDataSource().getPropertiesProvider()
- .computePropertiesVector(scanVariables);
+ IPhysicalPropertiesVector r =
+ dataSourceIndex.getDataSource().getPropertiesProvider().computePropertiesVector(scanVariables);
r.getLocalProperties().clear();
IPhysicalPropertiesVector[] requirements = new IPhysicalPropertiesVector[1];
requirements[0] = r;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IntersectPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IntersectPOperator.java
index 0baffc9..1d36cc0 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IntersectPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IntersectPOperator.java
@@ -87,8 +87,8 @@
public void computeDeliveredProperties(ILogicalOperator iop, IOptimizationContext context)
throws AlgebricksException {
IntersectOperator op = (IntersectOperator) iop;
- IPartitioningProperty pp = op.getInputs().get(0).getValue().getDeliveredPhysicalProperties()
- .getPartitioningProperty();
+ IPartitioningProperty pp =
+ op.getInputs().get(0).getValue().getDeliveredPhysicalProperties().getPartitioningProperty();
HashMap<LogicalVariable, LogicalVariable> varMaps = new HashMap<>(op.getOutputVars().size());
for (int i = 0; i < op.getOutputVars().size(); i++) {
@@ -114,9 +114,8 @@
int nInput = logicalOp.getNumInput();
int[][] compareFields = new int[nInput][];
- IBinaryComparatorFactory[] comparatorFactories = JobGenHelper
- .variablesToAscBinaryComparatorFactories(logicalOp.getCompareVariables(0),
- context.getTypeEnvironment(op), context);
+ IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(
+ logicalOp.getCompareVariables(0), context.getTypeEnvironment(op), context);
INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
INormalizedKeyComputerFactory nkcf = null;
@@ -147,9 +146,8 @@
IntersectOperatorDescriptor opDescriptor;
try {
- opDescriptor =
- new IntersectOperatorDescriptor(spec, nInput, compareFields, extraFields, nkcf, comparatorFactories,
- recordDescriptor);
+ opDescriptor = new IntersectOperatorDescriptor(spec, nInput, compareFields, extraFields, nkcf,
+ comparatorFactories, recordDescriptor);
} catch (HyracksException e) {
throw new AlgebricksException(e);
}
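[Editorial sketch: the intersect operator above compares its (sorted) inputs on per-branch compare fields using ascending comparator factories. The core idea, reduced to a two-way sorted-merge intersection on plain integers:]

    import java.util.*;

    // Sketch only: sorted-merge intersection, the essence of the intersect runtime.
    public final class SortedIntersect {
        static List<Integer> intersect(List<Integer> a, List<Integer> b) {
            List<Integer> out = new ArrayList<>();
            int i = 0, j = 0;
            while (i < a.size() && j < b.size()) {
                int cmp = Integer.compare(a.get(i), b.get(j));
                if (cmp == 0) { out.add(a.get(i)); i++; j++; } // both sides advance on a match
                else if (cmp < 0) i++;
                else j++;
            }
            return out;
        }

        public static void main(String[] args) {
            System.out.println(intersect(List.of(1, 3, 5, 7), List.of(3, 4, 5, 8))); // [3, 5]
        }
    }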
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MaterializePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MaterializePOperator.java
index c55a4ae..a48e3c2 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MaterializePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MaterializePOperator.java
@@ -70,10 +70,10 @@
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
throws AlgebricksException {
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- propagatedSchema, context);
- MaterializingOperatorDescriptor materializationOpDesc = new MaterializingOperatorDescriptor(
- builder.getJobSpec(), recDescriptor, isSingleActivity);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
+ MaterializingOperatorDescriptor materializationOpDesc =
+ new MaterializingOperatorDescriptor(builder.getJobSpec(), recDescriptor, isSingleActivity);
contributeOpDesc(builder, (AbstractLogicalOperator) op, materializationOpDesc);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MicroPreclusteredGroupByPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MicroPreclusteredGroupByPOperator.java
index 2772ee7..629afa3 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MicroPreclusteredGroupByPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MicroPreclusteredGroupByPOperator.java
@@ -63,14 +63,15 @@
int fdColumns[] = getFdColumns(gby, inputSchemas[0]);
// compile subplans and set the gby op. schema accordingly
AlgebricksPipeline[] subplans = compileSubplans(inputSchemas[0], gby, opSchema, context);
- IAggregatorDescriptorFactory aggregatorFactory = new NestedPlansAccumulatingAggregatorFactory(subplans, keys,
- fdColumns);
+ IAggregatorDescriptorFactory aggregatorFactory =
+ new NestedPlansAccumulatingAggregatorFactory(subplans, keys, fdColumns);

- IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(
- columnList, env, context);
- RecordDescriptor recordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
- RecordDescriptor inputRecordDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op.getInputs().get(0).getValue()),
- inputSchemas[0], context);
+ IBinaryComparatorFactory[] comparatorFactories =
+ JobGenHelper.variablesToAscBinaryComparatorFactories(columnList, env, context);
+ RecordDescriptor recordDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
+ RecordDescriptor inputRecordDesc = JobGenHelper.mkRecordDescriptor(
+ context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
MicroPreClusteredGroupRuntimeFactory runtime = new MicroPreClusteredGroupRuntimeFactory(keys,
comparatorFactories, aggregatorFactory, inputRecordDesc, recordDescriptor, null);
builder.contributeMicroOperator(gby, runtime, recordDescriptor);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/NestedLoopJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/NestedLoopJoinPOperator.java
index 80ed8fd..4d7bd7e1 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/NestedLoopJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/NestedLoopJoinPOperator.java
@@ -129,15 +129,15 @@
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
throws AlgebricksException {
AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op;
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- propagatedSchema, context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
IOperatorSchema[] conditionInputSchemas = new IOperatorSchema[1];
conditionInputSchemas[0] = propagatedSchema;
IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
IScalarEvaluatorFactory cond = expressionRuntimeProvider.createEvaluatorFactory(join.getCondition().getValue(),
context.getTypeEnvironment(op), conditionInputSchemas, context);
- ITuplePairComparatorFactory comparatorFactory = new TuplePairEvaluatorFactory(cond,
- context.getBinaryBooleanInspectorFactory());
+ ITuplePairComparatorFactory comparatorFactory =
+ new TuplePairEvaluatorFactory(cond, context.getBinaryBooleanInspectorFactory());
IOperatorDescriptorRegistry spec = builder.getJobSpec();
IOperatorDescriptor opDesc = null;
@@ -212,8 +212,8 @@
int innerIndex) throws HyracksDataException {
compositeTupleRef.reset(outerAccessor, outerIndex, innerAccessor, innerIndex);
condEvaluator.evaluate(compositeTupleRef, p);
- boolean result = binaryBooleanInspector.getBooleanValue(p.getByteArray(), p.getStartOffset(),
- p.getLength());
+ boolean result =
+ binaryBooleanInspector.getBooleanValue(p.getByteArray(), p.getStartOffset(), p.getLength());
if (result) {
return 0;
} else {
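[Editorial sketch: the tuple-pair "comparator" in the hunk above is really a predicate adapter. It evaluates the join condition over the concatenated (outer, inner) pair and maps true to 0, which the nested-loop join runtime reads as "match"; the false branch (truncated above) presumably returns a nonzero value. A toy version, with all names and the nonzero choice being assumptions:]

    import java.util.function.BiPredicate;

    // Sketch only: adapt a boolean join condition to the 0/nonzero comparator contract.
    public final class PairPredicateAdapter {
        static int compare(int[] outer, int[] inner, BiPredicate<int[], int[]> condition) {
            return condition.test(outer, inner) ? 0 : 1;
        }

        public static void main(String[] args) {
            BiPredicate<int[], int[]> cond = (o, i) -> o[0] == i[0]; // hypothetical equi-condition
            System.out.println(compare(new int[] { 1 }, new int[] { 1 }, cond)); // 0: tuples join
            System.out.println(compare(new int[] { 1 }, new int[] { 2 }, cond)); // 1: no match
        }
    }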
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/PreclusteredGroupByPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/PreclusteredGroupByPOperator.java
index 78e4795..0e0953c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/PreclusteredGroupByPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/PreclusteredGroupByPOperator.java
@@ -82,10 +82,10 @@
}
IOperatorDescriptorRegistry spec = builder.getJobSpec();
- IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(
- columnList, context.getTypeEnvironment(op), context);
- RecordDescriptor recordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema,
- context);
+ IBinaryComparatorFactory[] comparatorFactories = JobGenHelper
+ .variablesToAscBinaryComparatorFactories(columnList, context.getTypeEnvironment(op), context);
+ RecordDescriptor recordDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
PreclusteredGroupOperatorDescriptor opDesc = new PreclusteredGroupOperatorDescriptor(spec, keys,
comparatorFactories, aggregatorFactory, recordDescriptor, groupAll, framesLimit);
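[Editorial sketch: a pre-clustered group-by assumes its input already arrives clustered on the grouping keys, so a single pass holding one open group at a time suffices, which is why only ascending comparators and no hash table appear above. A toy single-pass version with invented data:]

    // Sketch only: one-pass grouping over key-clustered input (here: sum per key).
    public final class PreclusteredGroupBy {
        public static void main(String[] args) {
            int[][] rows = { { 1, 10 }, { 1, 5 }, { 2, 7 }, { 2, 1 }, { 3, 9 } }; // clustered on key
            int currentKey = rows[0][0];
            long sum = 0;
            for (int[] row : rows) {
                if (row[0] != currentKey) {
                    System.out.println(currentKey + " -> " + sum); // flush the finished group
                    currentKey = row[0];
                    sum = 0;
                }
                sum += row[1];
            }
            System.out.println(currentKey + " -> " + sum); // flush the last group
        }
    }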
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RandomPartitionExchangePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RandomPartitionExchangePOperator.java
index cba8f97..d17c0d9 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RandomPartitionExchangePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RandomPartitionExchangePOperator.java
@@ -50,9 +50,9 @@
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException {
- Pair<IConnectorDescriptor, TargetConstraint> connPair = createConnectorDescriptor(builder.getJobSpec(), op,
- opSchema, context);
+ throws AlgebricksException {
+ Pair<IConnectorDescriptor, TargetConstraint> connPair =
+ createConnectorDescriptor(builder.getJobSpec(), op, opSchema, context);
builder.contributeConnectorWithTargetConstraint(op, connPair.first, connPair.second);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionExchangePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionExchangePOperator.java
index 225ffa0..6630d32 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionExchangePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionExchangePOperator.java
@@ -58,7 +58,8 @@
private INodeDomain domain;
private IRangeMap rangeMap;
- public RangePartitionExchangePOperator(List<OrderColumn> partitioningFields, INodeDomain domain, IRangeMap rangeMap) {
+ public RangePartitionExchangePOperator(List<OrderColumn> partitioningFields, INodeDomain domain,
+ IRangeMap rangeMap) {
this.partitioningFields = partitioningFields;
this.domain = domain;
this.rangeMap = rangeMap;
@@ -79,7 +80,8 @@
@Override
public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
- IPartitioningProperty p = new OrderedPartitionedProperty(new ArrayList<OrderColumn>(partitioningFields), domain);
+ IPartitioningProperty p =
+ new OrderedPartitionedProperty(new ArrayList<OrderColumn>(partitioningFields), domain);
this.deliveredProperties = new StructuralPropertiesVector(p, new LinkedList<ILocalStructuralProperty>());
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionMergeExchangePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionMergeExchangePOperator.java
index f56a5dc..ec32a53 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionMergeExchangePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionMergeExchangePOperator.java
@@ -63,7 +63,8 @@
private INodeDomain domain;
private IRangeMap rangeMap;
- public RangePartitionMergeExchangePOperator(List<OrderColumn> partitioningFields, INodeDomain domain, IRangeMap rangeMap) {
+ public RangePartitionMergeExchangePOperator(List<OrderColumn> partitioningFields, INodeDomain domain,
+ IRangeMap rangeMap) {
this.partitioningFields = partitioningFields;
this.domain = domain;
this.rangeMap = rangeMap;
@@ -113,8 +114,8 @@
columns.add(new OrderColumn(var, oc.getOrder()));
}
orderProps.add(new LocalOrderProperty(columns));
- StructuralPropertiesVector[] r = new StructuralPropertiesVector[] { new StructuralPropertiesVector(null,
- orderProps) };
+ StructuralPropertiesVector[] r =
+ new StructuralPropertiesVector[] { new StructuralPropertiesVector(null, orderProps) };
return new PhysicalRequirements(r, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java
index 74739da..25d31d2 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java
@@ -43,15 +43,15 @@
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
throws AlgebricksException {
IOperatorDescriptorRegistry spec = builder.getJobSpec();
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- propagatedSchema, context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
ReplicateOperator rop = (ReplicateOperator) op;
int outputArity = rop.getOutputArity();
boolean[] outputMaterializationFlags = rop.getOutputMaterializationFlags();
- ReplicateOperatorDescriptor splitOpDesc = new ReplicateOperatorDescriptor(spec, recDescriptor, outputArity,
- outputMaterializationFlags);
+ ReplicateOperatorDescriptor splitOpDesc =
+ new ReplicateOperatorDescriptor(spec, recDescriptor, outputArity, outputMaterializationFlags);
contributeOpDesc(builder, (AbstractLogicalOperator) op, splitOpDesc);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RunningAggregatePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RunningAggregatePOperator.java
index 8e4ca18..3a6ba74 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RunningAggregatePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RunningAggregatePOperator.java
@@ -89,8 +89,8 @@
// TODO push projections into the operator
int[] projectionList = JobGenHelper.projectAllVariables(opSchema);
- RunningAggregateRuntimeFactory runtime = new RunningAggregateRuntimeFactory(outColumns, runningAggFuns,
- projectionList);
+ RunningAggregateRuntimeFactory runtime =
+ new RunningAggregateRuntimeFactory(outColumns, runningAggFuns, projectionList);
// contribute one Asterix framewriter
RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
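[Editorial sketch: unlike a grouping aggregate, the running aggregate above emits one output tuple per input tuple while carrying state across tuples. A minimal running count, the simplest instance of that pattern, with invented data:]

    import java.util.List;

    // Sketch only: per-tuple output with state carried across tuples.
    public final class RunningCount {
        public static void main(String[] args) {
            List<String> input = List.of("a", "b", "c");
            long running = 0; // state the runtime factory's evaluator would carry
            for (String t : input) {
                running++;
                System.out.println(t + " -> " + running); // a->1, b->2, c->3
            }
        }
    }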
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkPOperator.java
index d0b7b47..5084c18 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkPOperator.java
@@ -68,7 +68,7 @@
@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
- return emptyUnaryRequirements(op.getInputs().size());
+ return emptyUnaryRequirements(op.getInputs().size());
}
@Override
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java
index 35f9444..f76b69b 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java
@@ -91,17 +91,18 @@
LogicalVariable v = varRef.getVariableReference();
columns[i++] = inputSchemas[0].findVariable(v);
}
- RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
- RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0],
- context);
+ RecordDescriptor recDesc =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
+ RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
+ context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);

- IPrinterFactory[] pf = JobGenHelper.mkPrinterFactories(inputSchemas[0], context.getTypeEnvironment(op),
- context, columns);
+ IPrinterFactory[] pf =
+ JobGenHelper.mkPrinterFactories(inputSchemas[0], context.getTypeEnvironment(op), context, columns);
IMetadataProvider<?, ?> mp = context.getMetadataProvider();
- Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> runtime = mp.getWriteFileRuntime(write.getDataSink(),
- columns, pf, inputDesc);
+ Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> runtime =
+ mp.getWriteFileRuntime(write.getDataSink(), columns, pf, inputDesc);
builder.contributeMicroOperator(write, runtime.first, recDesc, runtime.second);
ILogicalOperator src = write.getInputs().get(0).getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortGroupByPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortGroupByPOperator.java
index c08ff85..1aeeca9 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortGroupByPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortGroupByPOperator.java
@@ -188,8 +188,8 @@
AggregateFunctionCallExpression aggFun = (AggregateFunctionCallExpression) exprRef.getValue();
aff[i++] = expressionRuntimeProvider.createAggregateFunctionFactory(aggFun, aggOpInputEnv, inputSchemas,
context);
- intermediateTypes.add(partialAggregationTypeComputer.getType(aggFun, aggOpInputEnv,
- context.getMetadataProvider()));
+ intermediateTypes
+ .add(partialAggregationTypeComputer.getType(aggFun, aggOpInputEnv, context.getMetadataProvider()));
}
int[] keyAndDecFields = new int[keys.length + fdColumns.length];
@@ -227,16 +227,16 @@
}
i++;
}
- RecordDescriptor recordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema,
- context);
+ RecordDescriptor recordDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
IAggregateEvaluatorFactory[] merges = new IAggregateEvaluatorFactory[n];
List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
IOperatorSchema[] localInputSchemas = new IOperatorSchema[1];
localInputSchemas[0] = new OperatorSchemaImpl();
for (i = 0; i < n; i++) {
- AggregateFunctionCallExpression aggFun = (AggregateFunctionCallExpression) aggOp.getMergeExpressions()
- .get(i).getValue();
+ AggregateFunctionCallExpression aggFun =
+ (AggregateFunctionCallExpression) aggOp.getMergeExpressions().get(i).getValue();
aggFun.getUsedVariables(usedVars);
}
i = 0;
@@ -250,18 +250,18 @@
localInputSchemas[0].addVariable(usedVar);
}
for (i = 0; i < n; i++) {
- AggregateFunctionCallExpression mergeFun = (AggregateFunctionCallExpression) aggOp.getMergeExpressions()
- .get(i).getValue();
+ AggregateFunctionCallExpression mergeFun =
+ (AggregateFunctionCallExpression) aggOp.getMergeExpressions().get(i).getValue();
merges[i] = expressionRuntimeProvider.createAggregateFunctionFactory(mergeFun, aggOpInputEnv,
localInputSchemas, context);
}
- RecordDescriptor partialAggRecordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- localInputSchemas[0], context);
+ RecordDescriptor partialAggRecordDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), localInputSchemas[0], context);

- IAggregatorDescriptorFactory aggregatorFactory = new SimpleAlgebricksAccumulatingAggregatorFactory(aff,
- keyAndDecFields);
- IAggregatorDescriptorFactory mergeFactory = new SimpleAlgebricksAccumulatingAggregatorFactory(merges,
- keyAndDecFields);
+ IAggregatorDescriptorFactory aggregatorFactory =
+ new SimpleAlgebricksAccumulatingAggregatorFactory(aff, keyAndDecFields);
+ IAggregatorDescriptorFactory mergeFactory =
+ new SimpleAlgebricksAccumulatingAggregatorFactory(merges, keyAndDecFields);
INormalizedKeyComputerFactory normalizedKeyFactory = null;
INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
@@ -269,9 +269,9 @@
normalizedKeyFactory = null;
}
Object type = aggOpInputEnv.getVarType(gbyCols.get(0));
- normalizedKeyFactory = orderColumns[0].getOrder() == OrderKind.ASC ? nkcfProvider
- .getNormalizedKeyComputerFactory(type, true) : nkcfProvider
- .getNormalizedKeyComputerFactory(type, false);
+ normalizedKeyFactory =
+ orderColumns[0].getOrder() == OrderKind.ASC ? nkcfProvider.getNormalizedKeyComputerFactory(type, true)
+ : nkcfProvider.getNormalizedKeyComputerFactory(type, false);
SortGroupByOperatorDescriptor gbyOpDesc = new SortGroupByOperatorDescriptor(spec, frameLimit, keys,
keyAndDecFields, normalizedKeyFactory, compFactories, aggregatorFactory, mergeFactory,
partialAggRecordDescriptor, recordDescriptor, false);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
index 81f6e6b..6c02dca 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
@@ -124,8 +124,8 @@
IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
List<ILocalStructuralProperty> localProps = new ArrayList<ILocalStructuralProperty>(sortColumns.length);
localProps.add(new LocalOrderProperty(Arrays.asList(sortColumns)));
- StructuralPropertiesVector[] r = new StructuralPropertiesVector[] { new StructuralPropertiesVector(null,
- localProps) };
+ StructuralPropertiesVector[] r =
+ new StructuralPropertiesVector[] { new StructuralPropertiesVector(null, localProps) };
return new PhysicalRequirements(r, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SplitPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SplitPOperator.java
index 923e56a..c9fde4b 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SplitPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SplitPOperator.java
@@ -51,8 +51,8 @@
boolean propageToAllBranchAsDefault = sop.getPropageToAllBranchAsDefault();
IOperatorDescriptorRegistry spec = builder.getJobSpec();
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- propagatedSchema, context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
IScalarEvaluatorFactory brachingExprEvalFactory = expressionRuntimeProvider.createEvaluatorFactory(
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StableSortPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StableSortPOperator.java
index 120c1c4..3a4249b 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StableSortPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StableSortPOperator.java
@@ -72,10 +72,10 @@
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException {
+ throws AlgebricksException {
IOperatorDescriptorRegistry spec = builder.getJobSpec();
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema,
- context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
int n = sortColumns.length;
int[] sortFields = new int[n];
IBinaryComparatorFactory[] comps = new IBinaryComparatorFactory[n];
@@ -100,8 +100,8 @@
// topK == -1 means that a topK value is not provided.
if (topK == -1) {
- ExternalSortOperatorDescriptor sortOpDesc = new ExternalSortOperatorDescriptor(spec, maxNumberOfFrames,
- sortFields, nkcf, comps, recDescriptor);
+ ExternalSortOperatorDescriptor sortOpDesc =
+ new ExternalSortOperatorDescriptor(spec, maxNumberOfFrames, sortFields, nkcf, comps, recDescriptor);
contributeOpDesc(builder, (AbstractLogicalOperator) op, sortOpDesc);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
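[Editorial sketch: per the comment above, topK == -1 is the sentinel for "no top-K limit", selecting a full external sort; otherwise a bounded pass that keeps only K rows would do. A toy version of that branch; the bounded-heap alternative is an illustration, not the operator the real else-branch builds:]

    import java.util.*;

    // Sketch only: full sort when topK == -1, otherwise keep the K smallest via a max-heap.
    public final class SortOrTopK {
        static List<Integer> run(List<Integer> data, int topK) {
            if (topK == -1) {
                List<Integer> all = new ArrayList<>(data);
                all.sort(null); // stands in for the external-sort path
                return all;
            }
            PriorityQueue<Integer> heap = new PriorityQueue<>(Comparator.reverseOrder()); // max-heap of size topK
            for (int v : data) {
                heap.add(v);
                if (heap.size() > topK) heap.poll(); // drop the current largest, keeping the K smallest
            }
            List<Integer> out = new ArrayList<>(heap);
            out.sort(null);
            return out;
        }

        public static void main(String[] args) {
            List<Integer> data = List.of(5, 1, 4, 2, 3);
            System.out.println(run(data, -1)); // [1, 2, 3, 4, 5]
            System.out.println(run(data, 2));  // [1, 2]
        }
    }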
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java
index 99be356..da75da8 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java
@@ -62,8 +62,8 @@
ILogicalOperator op2 = op.getInputs().get(0).getValue();
if (limitOp.getExecutionMode() == AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) {
//partitioning property: unpartitioned; local property: whatever from the child
- deliveredProperties = new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED, op2
- .getDeliveredPhysicalProperties().getLocalProperties());
+ deliveredProperties = new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED,
+ op2.getDeliveredPhysicalProperties().getLocalProperties());
} else {
deliveredProperties = op2.getDeliveredPhysicalProperties().clone();
}
@@ -89,13 +89,13 @@
LimitOperator limit = (LimitOperator) op;
IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
IVariableTypeEnvironment env = context.getTypeEnvironment(op);
- IScalarEvaluatorFactory maxObjectsFact = expressionRuntimeProvider.createEvaluatorFactory(limit.getMaxObjects()
- .getValue(), env, inputSchemas, context);
+ IScalarEvaluatorFactory maxObjectsFact = expressionRuntimeProvider
+ .createEvaluatorFactory(limit.getMaxObjects().getValue(), env, inputSchemas, context);
ILogicalExpression offsetExpr = limit.getOffset().getValue();
- IScalarEvaluatorFactory offsetFact = (offsetExpr == null) ? null : expressionRuntimeProvider
- .createEvaluatorFactory(offsetExpr, env, inputSchemas, context);
- RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema,
- context);
+ IScalarEvaluatorFactory offsetFact = (offsetExpr == null) ? null
+ : expressionRuntimeProvider.createEvaluatorFactory(offsetExpr, env, inputSchemas, context);
+ RecordDescriptor recDesc =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
StreamLimitRuntimeFactory runtime = new StreamLimitRuntimeFactory(maxObjectsFact, offsetFact, null,
context.getBinaryIntegerInspectorFactory());
builder.contributeMicroOperator(limit, runtime, recDesc);
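[Editorial sketch: the wiring above makes the offset optional (a null offset expression yields a null evaluator factory) while maxObjects always caps the stream. The same semantics in miniature, with invented names:]

    import java.util.*;

    // Sketch only: skip an optional offset, then pass through at most maxObjects tuples.
    public final class StreamLimit {
        static <T> List<T> limit(List<T> in, int maxObjects, Integer offset) {
            int skip = (offset == null) ? 0 : offset; // null offsetExpr -> no offset evaluator
            List<T> out = new ArrayList<>();
            for (int i = skip; i < in.size() && out.size() < maxObjects; i++) {
                out.add(in.get(i));
            }
            return out;
        }

        public static void main(String[] args) {
            System.out.println(limit(List.of("a", "b", "c", "d"), 2, 1));    // [b, c]
            System.out.println(limit(List.of("a", "b", "c", "d"), 2, null)); // [a, b]
        }
    }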
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java
index 184cbbc..3ff7dc1 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java
@@ -68,8 +68,8 @@
projectionList[i++] = pos;
}
StreamProjectRuntimeFactory runtime = new StreamProjectRuntimeFactory(projectionList, flushFramesRapidly);
- RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema,
- context);
+ RecordDescriptor recDesc =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
builder.contributeMicroOperator(project, runtime, recDesc);
ILogicalOperator src = project.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, project, 0);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamSelectPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamSelectPOperator.java
index e87f3f6..ddde5f3 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamSelectPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamSelectPOperator.java
@@ -66,10 +66,10 @@
IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
IScalarEvaluatorFactory cond = expressionRuntimeProvider.createEvaluatorFactory(
select.getCondition().getValue(), context.getTypeEnvironment(op), inputSchemas, context);
- StreamSelectRuntimeFactory runtime = new StreamSelectRuntimeFactory(cond, null,
- context.getBinaryBooleanInspectorFactory(), select.getRetainMissing(),
- inputSchemas[0].findVariable(select.getMissingPlaceholderVariable()),
- context.getMissingWriterFactory());
+ StreamSelectRuntimeFactory runtime =
+ new StreamSelectRuntimeFactory(cond, null, context.getBinaryBooleanInspectorFactory(),
+ select.getRetainMissing(), inputSchemas[0].findVariable(select.getMissingPlaceholderVariable()),
+ context.getMissingWriterFactory());
// contribute one Asterix framewriter
RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
builder.contributeMicroOperator(select, runtime, recDesc);
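[Editorial sketch: as I read the wiring above, a plain stream select drops tuples that fail the condition, while retainMissing (used under outer-join-style plans) keeps them and writes a missing value into the placeholder variable's column via the missing-writer factory. A toy rendering of that behavior; the exact semantics of retainMissing are an assumption here:]

    import java.util.*;

    // Sketch only: select that either drops failing tuples or retains them as "missing".
    public final class SelectRetainMissing {
        public static void main(String[] args) {
            List<Integer> input = Arrays.asList(1, -2, 3);
            boolean retainMissing = true;
            for (Integer v : input) {
                boolean pass = v > 0; // stands in for the compiled condition evaluator
                if (pass) System.out.println(v);
                else if (retainMissing) System.out.println("MISSING"); // placeholder-column write
            }
        }
    }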
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StringStreamingScriptPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StringStreamingScriptPOperator.java
index 1f5159d..01e9a0c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StringStreamingScriptPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StringStreamingScriptPOperator.java
@@ -65,8 +65,8 @@
StringStreamingScriptDescription sssd = (StringStreamingScriptDescription) scriptDesc;
StringStreamingRuntimeFactory runtime = new StringStreamingRuntimeFactory(sssd.getCommand(),
sssd.getPrinterFactories(), sssd.getFieldDelimiter(), sssd.getParserFactory());
- RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema,
- context);
+ RecordDescriptor recDesc =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
builder.contributeMicroOperator(scriptOp, runtime, recDesc);
// and contribute one edge from its child
ILogicalOperator src = scriptOp.getInputs().get(0).getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/TokenizePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/TokenizePOperator.java
index 557a657..cd696bc 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/TokenizePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/TokenizePOperator.java
@@ -91,9 +91,9 @@
JobSpecification spec = builder.getJobSpec();
RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
- Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = mp.getTokenizerRuntime(
- dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys, secondaryKeys, null, inputDesc,
- context, spec, true);
+ Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints =
+ mp.getTokenizerRuntime(dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys,
+ secondaryKeys, null, inputDesc, context, spec, true);
builder.contributeHyracksOperator(tokenizeOp, runtimeAndConstraints.first);
builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
ILogicalOperator src = tokenizeOp.getInputs().get(0).getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/UnionAllPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/UnionAllPOperator.java
index d184161..a617064 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/UnionAllPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/UnionAllPOperator.java
@@ -62,12 +62,12 @@
@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
- StructuralPropertiesVector pv0 = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op,
- new StructuralPropertiesVector(new RandomPartitioningProperty(context.getComputationNodeDomain()),
- null));
- StructuralPropertiesVector pv1 = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op,
- new StructuralPropertiesVector(new RandomPartitioningProperty(context.getComputationNodeDomain()),
- null));
+ StructuralPropertiesVector pv0 =
+ OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op, new StructuralPropertiesVector(
+ new RandomPartitioningProperty(context.getComputationNodeDomain()), null));
+ StructuralPropertiesVector pv1 =
+ OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op, new StructuralPropertiesVector(
+ new RandomPartitioningProperty(context.getComputationNodeDomain()), null));
return new PhysicalRequirements(new StructuralPropertiesVector[] { pv0, pv1 },
IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/AlgebricksAppendable.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/AlgebricksAppendable.java
index 7002493..f8929e0 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/AlgebricksAppendable.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/AlgebricksAppendable.java
@@ -37,7 +37,8 @@
return app;
}
- @Override public String toString() {
+ @Override
+ public String toString() {
return app.toString();
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalExpressionPrettyPrintVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalExpressionPrettyPrintVisitor.java
index 8318176..72f891a 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalExpressionPrettyPrintVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalExpressionPrettyPrintVisitor.java
@@ -27,48 +27,40 @@
import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionVisitor;
-
public class LogicalExpressionPrettyPrintVisitor implements ILogicalExpressionVisitor<String, Integer> {
@Override
- public String visitConstantExpression(ConstantExpression expr, Integer indent)
+ public String visitConstantExpression(ConstantExpression expr, Integer indent) throws AlgebricksException {
+ return expr.toString();
+ }
+
+ @Override
+ public String visitVariableReferenceExpression(VariableReferenceExpression expr, Integer indent)
throws AlgebricksException {
return expr.toString();
}
@Override
- public String visitVariableReferenceExpression(
- VariableReferenceExpression expr, Integer indent)
+ public String visitAggregateFunctionCallExpression(AggregateFunctionCallExpression expr, Integer indent)
throws AlgebricksException {
return expr.toString();
}
@Override
- public String visitAggregateFunctionCallExpression(
- AggregateFunctionCallExpression expr, Integer indent)
+ public String visitScalarFunctionCallExpression(ScalarFunctionCallExpression expr, Integer indent)
throws AlgebricksException {
return expr.toString();
}
@Override
- public String visitScalarFunctionCallExpression(
- ScalarFunctionCallExpression expr, Integer indent)
+ public String visitStatefulFunctionCallExpression(StatefulFunctionCallExpression expr, Integer indent)
throws AlgebricksException {
return expr.toString();
}
@Override
- public String visitStatefulFunctionCallExpression(
- StatefulFunctionCallExpression expr, Integer indent)
- throws AlgebricksException {
- return expr.toString();
- }
-
- @Override
- public String visitUnnestingFunctionCallExpression(
- UnnestingFunctionCallExpression expr, Integer indent)
+ public String visitUnnestingFunctionCallExpression(UnnestingFunctionCallExpression expr, Integer indent)
throws AlgebricksException {
return expr.toString();
}
}
-
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
index 3e75c66..0ad3fea 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
@@ -394,8 +394,7 @@
if (op.getOperation() == Kind.UPSERT) {
buffer.append(" out: ([record-before-upsert:" + op.getBeforeOpRecordVar()
+ ((op.getBeforeOpAdditionalNonFilteringVars() != null)
- ? (", additional-before-upsert: " + op.getBeforeOpAdditionalNonFilteringVars())
- : "")
+ ? (", additional-before-upsert: " + op.getBeforeOpAdditionalNonFilteringVars()) : "")
+ "]) ");
}
if (op.isBulkload()) {
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/IPartitioningRequirementsCoordinator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/IPartitioningRequirementsCoordinator.java
index f4f5d7f..d515fcf 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/IPartitioningRequirementsCoordinator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/IPartitioningRequirementsCoordinator.java
@@ -46,55 +46,57 @@
}
};
- public static IPartitioningRequirementsCoordinator EQCLASS_PARTITIONING_COORDINATOR = new IPartitioningRequirementsCoordinator() {
+ public static IPartitioningRequirementsCoordinator EQCLASS_PARTITIONING_COORDINATOR =
+ new IPartitioningRequirementsCoordinator() {
- @Override
- public Pair<Boolean, IPartitioningProperty> coordinateRequirements(IPartitioningProperty rqdpp,
- IPartitioningProperty firstDeliveredPartitioning, ILogicalOperator op, IOptimizationContext context)
- throws AlgebricksException {
- if (firstDeliveredPartitioning != null && rqdpp != null
- && firstDeliveredPartitioning.getPartitioningType() == rqdpp.getPartitioningType()) {
- switch (rqdpp.getPartitioningType()) {
- case UNORDERED_PARTITIONED: {
- UnorderedPartitionedProperty upp1 = (UnorderedPartitionedProperty) firstDeliveredPartitioning;
- Set<LogicalVariable> set1 = upp1.getColumnSet();
- UnorderedPartitionedProperty uppreq = (UnorderedPartitionedProperty) rqdpp;
- Set<LogicalVariable> modifuppreq = new ListSet<LogicalVariable>();
- Map<LogicalVariable, EquivalenceClass> eqmap = context.getEquivalenceClassMap(op);
- Set<LogicalVariable> covered = new ListSet<LogicalVariable>();
+ @Override
+ public Pair<Boolean, IPartitioningProperty> coordinateRequirements(IPartitioningProperty rqdpp,
+ IPartitioningProperty firstDeliveredPartitioning, ILogicalOperator op,
+ IOptimizationContext context) throws AlgebricksException {
+ if (firstDeliveredPartitioning != null && rqdpp != null
+ && firstDeliveredPartitioning.getPartitioningType() == rqdpp.getPartitioningType()) {
+ switch (rqdpp.getPartitioningType()) {
+ case UNORDERED_PARTITIONED: {
+ UnorderedPartitionedProperty upp1 =
+ (UnorderedPartitionedProperty) firstDeliveredPartitioning;
+ Set<LogicalVariable> set1 = upp1.getColumnSet();
+ UnorderedPartitionedProperty uppreq = (UnorderedPartitionedProperty) rqdpp;
+ Set<LogicalVariable> modifuppreq = new ListSet<LogicalVariable>();
+ Map<LogicalVariable, EquivalenceClass> eqmap = context.getEquivalenceClassMap(op);
+ Set<LogicalVariable> covered = new ListSet<LogicalVariable>();
- // coordinate from an existing partition property
- // (firstDeliveredPartitioning)
- for (LogicalVariable v : set1) {
- EquivalenceClass ecFirst = eqmap.get(v);
- for (LogicalVariable r : uppreq.getColumnSet()) {
- EquivalenceClass ec = eqmap.get(r);
- if (ecFirst == ec) {
- covered.add(v);
- modifuppreq.add(r);
- break;
+ // coordinate from an existing partition property
+ // (firstDeliveredPartitioning)
+ for (LogicalVariable v : set1) {
+ EquivalenceClass ecFirst = eqmap.get(v);
+ for (LogicalVariable r : uppreq.getColumnSet()) {
+ EquivalenceClass ec = eqmap.get(r);
+ if (ecFirst == ec) {
+ covered.add(v);
+ modifuppreq.add(r);
+ break;
+ }
+ }
}
+
+ if (!covered.equals(set1)) {
+ throw new AlgebricksException("Could not modify " + rqdpp
+ + " to agree with partitioning property " + firstDeliveredPartitioning
+ + " delivered by previous input operator.");
+ }
+ UnorderedPartitionedProperty upp2 =
+ new UnorderedPartitionedProperty(modifuppreq, rqdpp.getNodeDomain());
+ return new Pair<Boolean, IPartitioningProperty>(false, upp2);
+ }
+ case ORDERED_PARTITIONED: {
+ throw new NotImplementedException();
}
}
-
- if (!covered.equals(set1)) {
- throw new AlgebricksException("Could not modify " + rqdpp
- + " to agree with partitioning property " + firstDeliveredPartitioning
- + " delivered by previous input operator.");
- }
- UnorderedPartitionedProperty upp2 = new UnorderedPartitionedProperty(modifuppreq,
- rqdpp.getNodeDomain());
- return new Pair<Boolean, IPartitioningProperty>(false, upp2);
}
- case ORDERED_PARTITIONED: {
- throw new NotImplementedException();
- }
+ return new Pair<Boolean, IPartitioningProperty>(true, rqdpp);
}
- }
- return new Pair<Boolean, IPartitioningProperty>(true, rqdpp);
- }
- };
+ };
public Pair<Boolean, IPartitioningProperty> coordinateRequirements(IPartitioningProperty requirements,
IPartitioningProperty firstDeliveredPartitioning, ILogicalOperator op, IOptimizationContext context)
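
Note: stripped of the re-indentation noise, the logic in this hunk does one thing: for each column of the first delivered partitioning, find a required column in the same equivalence class; if every delivered column is covered, the requirement is rewritten to the matched columns, otherwise an AlgebricksException is raised. A standalone paraphrase, with strings standing in for LogicalVariable and integers for EquivalenceClass (simplified assumed types, not the real ones):

    import java.util.HashSet;
    import java.util.LinkedHashSet;
    import java.util.Map;
    import java.util.Objects;
    import java.util.Set;

    final class CoordinateSketch {
        // Returns the rewritten requirement, or null when 'delivered' is not fully
        // covered (where the real code throws AlgebricksException instead).
        static Set<String> coordinate(Set<String> delivered, Set<String> required,
                Map<String, Integer> eqClassOf) {
            Set<String> covered = new HashSet<>();
            Set<String> modified = new LinkedHashSet<>();
            for (String v : delivered) {
                Integer ecFirst = eqClassOf.get(v);
                for (String r : required) {
                    if (Objects.equals(ecFirst, eqClassOf.get(r))) {
                        covered.add(v); // delivered column v is matched...
                        modified.add(r); // ...by required column r
                        break;
                    }
                }
            }
            return covered.equals(delivered) ? modified : null;
        }

        public static void main(String[] args) {
            Map<String, Integer> eq = Map.of("a", 1, "b", 2, "x", 1, "y", 2);
            System.out.println(coordinate(Set.of("a", "b"), Set.of("x", "y"), eq)); // [x, y]
        }
    }
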
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/LocalGroupingProperty.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/LocalGroupingProperty.java
index af91a3a..1cd7e64 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/LocalGroupingProperty.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/LocalGroupingProperty.java
@@ -114,8 +114,8 @@
return null;
}
int numColumns = newColumns.size();
- List<LogicalVariable> newOrderEnforcer = preferredOrderEnforcer.size() > numColumns ? preferredOrderEnforcer
- .subList(0, numColumns) : preferredOrderEnforcer;
+ List<LogicalVariable> newOrderEnforcer = preferredOrderEnforcer.size() > numColumns
+ ? preferredOrderEnforcer.subList(0, numColumns) : preferredOrderEnforcer;
return new LocalGroupingProperty(newColumns, newOrderEnforcer);
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/StructuralPropertiesVector.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/StructuralPropertiesVector.java
index c6b4618..aa6afdb 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/StructuralPropertiesVector.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/StructuralPropertiesVector.java
@@ -86,8 +86,8 @@
if (reqdPart != null) {
IPartitioningProperty normalizedReqPart =
reqdPart.normalize(equivalenceClasses, mayExpandProperties ? fds : null);
- IPartitioningProperty normalizedPropPart = propPartitioning.normalize(equivalenceClasses,
- mayExpandProperties ? fds : null);
+ IPartitioningProperty normalizedPropPart =
+ propPartitioning.normalize(equivalenceClasses, mayExpandProperties ? fds : null);
if (!PropertiesUtil.matchPartitioningProps(normalizedReqPart, normalizedPropPart, mayExpandProperties)) {
diffPart = reqdPart;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/TypePropagationPolicy.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/TypePropagationPolicy.java
index 566c13e..9d60370 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/TypePropagationPolicy.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/TypePropagationPolicy.java
@@ -71,8 +71,8 @@
// found in both inner and outer branches. Fix computeOutputTypeEnvironment() in ProjectOperator
// and investigate why many test queries fail if only live variables' types are propagated.
for (int i = n - 1; i >= 0; i--) {
- Object t = typeEnvs[i].getTypeEnv().getVarType(var, nonNullVariableList,
- correlatedNullableVariableLists);
+ Object t =
+ typeEnvs[i].getTypeEnv().getVarType(var, nonNullVariableList, correlatedNullableVariableLists);
if (t == null) {
continue;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java
index 8d00696..249e66f 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java
@@ -40,8 +40,7 @@
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.
- LogicalOperatorDeepCopyWithNewVariablesVisitor;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.LogicalOperatorDeepCopyWithNewVariablesVisitor;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.OperatorDeepCopyVisitor;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
@@ -102,8 +101,8 @@
}
case NESTEDTUPLESOURCE: {
NestedTupleSourceOperator nts = (NestedTupleSourceOperator) op;
- AbstractLogicalOperator prevOp = (AbstractLogicalOperator) nts.getDataSourceReference().getValue()
- .getInputs().get(0).getValue();
+ AbstractLogicalOperator prevOp =
+ (AbstractLogicalOperator) nts.getDataSourceReference().getValue().getInputs().get(0).getValue();
if (prevOp.getExecutionMode() != AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) {
nts.setExecutionMode(AbstractLogicalOperator.ExecutionMode.LOCAL);
}
@@ -169,8 +168,8 @@
if (op.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE && goThroughNts) {
NestedTupleSourceOperator nts = (NestedTupleSourceOperator) op;
if (nts.getDataSourceReference() != null) {
- AbstractLogicalOperator op2 = (AbstractLogicalOperator) nts.getDataSourceReference().getValue()
- .getInputs().get(0).getValue();
+ AbstractLogicalOperator op2 =
+ (AbstractLogicalOperator) nts.getDataSourceReference().getValue().getInputs().get(0).getValue();
substituteVarRec(op2, v1, v2, goThroughNts, ctx);
}
}
@@ -202,8 +201,8 @@
public static Pair<ILogicalOperator, Map<LogicalVariable, LogicalVariable>> deepCopyWithNewVars(
ILogicalOperator root, IOptimizationContext ctx) throws AlgebricksException {
- LogicalOperatorDeepCopyWithNewVariablesVisitor deepCopyVisitor = new
- LogicalOperatorDeepCopyWithNewVariablesVisitor(ctx, null, true);
+ LogicalOperatorDeepCopyWithNewVariablesVisitor deepCopyVisitor =
+ new LogicalOperatorDeepCopyWithNewVariablesVisitor(ctx, null, true);
ILogicalOperator newRoot = deepCopyVisitor.deepCopy(root);
return Pair.of(newRoot, deepCopyVisitor.getInputToOutputVariableMapping());
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorPropertiesUtil.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorPropertiesUtil.java
index 9dd1ade..e0d806d 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorPropertiesUtil.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorPropertiesUtil.java
@@ -345,9 +345,8 @@
StructuralPropertiesVector partitionedPropertiesVector) {
ILogicalOperator leftChild = op.getInputs().get(0).getValue();
ILogicalOperator rightChild = op.getInputs().get(1).getValue();
- boolean unPartitioned =
- leftChild.getExecutionMode().equals(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) && rightChild
- .getExecutionMode().equals(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
+ boolean unPartitioned = leftChild.getExecutionMode().equals(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED)
+ && rightChild.getExecutionMode().equals(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
return unPartitioned ? StructuralPropertiesVector.EMPTY_PROPERTIES_VECTOR : partitionedPropertiesVector;
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/visitors/AbstractConstVarFunVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/visitors/AbstractConstVarFunVisitor.java
index 2fa4672..695630c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/visitors/AbstractConstVarFunVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/visitors/AbstractConstVarFunVisitor.java
@@ -39,7 +39,8 @@
return visitFunctionCallExpression(expr, arg);
}
- public R visitStatefulFunctionCallExpression(StatefulFunctionCallExpression expr, T arg) throws AlgebricksException {
+ public R visitStatefulFunctionCallExpression(StatefulFunctionCallExpression expr, T arg)
+ throws AlgebricksException {
return visitFunctionCallExpression(expr, arg);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobBuilder.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobBuilder.java
index 4c42db8..13eef09 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobBuilder.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobBuilder.java
@@ -59,7 +59,8 @@
private final Map<ILogicalOperator, AlgebricksPartitionConstraint> pcForMicroOps = new HashMap<>();
private final Map<ILogicalOperator, Integer> algebraicOpBelongingToMetaAsterixOp = new HashMap<>();
- private final Map<Integer, List<Pair<IPushRuntimeFactory, RecordDescriptor>>> metaAsterixOpSkeletons = new HashMap<>();
+ private final Map<Integer, List<Pair<IPushRuntimeFactory, RecordDescriptor>>> metaAsterixOpSkeletons =
+ new HashMap<>();
private final Map<Integer, AlgebricksMetaOperatorDescriptor> metaAsterixOps = new HashMap<>();
private final Map<IOperatorDescriptor, AlgebricksPartitionConstraint> partitionConstraintMap = new HashMap<>();
@@ -205,8 +206,8 @@
if (opInputs != null) {
for (IConnectorDescriptor conn : opInputs) {
ConnectorDescriptorId cid = conn.getConnectorId();
- org.apache.commons.lang3.tuple.Pair<org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>, org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>> p = jobSpec
- .getConnectorOperatorMap().get(cid);
+ org.apache.commons.lang3.tuple.Pair<org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>, org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>> p =
+ jobSpec.getConnectorOperatorMap().get(cid);
IOperatorDescriptor src = p.getLeft().getLeft();
TargetConstraint constraint = tgtConstraints.get(conn);
if (constraint != null) {
@@ -236,8 +237,8 @@
if (opInputs != null) {
for (IConnectorDescriptor conn : opInputs) {
ConnectorDescriptorId cid = conn.getConnectorId();
- org.apache.commons.lang3.tuple.Pair<org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>, org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>> p = jobSpec
- .getConnectorOperatorMap().get(cid);
+ org.apache.commons.lang3.tuple.Pair<org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>, org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>> p =
+ jobSpec.getConnectorOperatorMap().get(cid);
IOperatorDescriptor src = p.getLeft().getLeft();
// Pre-order DFS
setPartitionConstraintsBottomup(src.getOperatorId(), tgtConstraints, opDesc, finalPass);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
index 947bac1..b204bcb 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
@@ -96,7 +96,7 @@
public static IBinaryHashFunctionFactory[] variablesToBinaryHashFunctionFactories(
Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
IBinaryHashFunctionFactory[] funFactories = new IBinaryHashFunctionFactory[varLogical.size()];
int i = 0;
IBinaryHashFunctionFactoryProvider bhffProvider = context.getBinaryHashFunctionFactoryProvider();
@@ -109,7 +109,7 @@
public static IBinaryHashFunctionFamily[] variablesToBinaryHashFunctionFamilies(
Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
IBinaryHashFunctionFamily[] funFamilies = new IBinaryHashFunctionFamily[varLogical.size()];
int i = 0;
IBinaryHashFunctionFamilyProvider bhffProvider = context.getBinaryHashFunctionFamilyProvider();
@@ -122,7 +122,7 @@
public static IBinaryComparatorFactory[] variablesToAscBinaryComparatorFactories(
Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
IBinaryComparatorFactory[] compFactories = new IBinaryComparatorFactory[varLogical.size()];
IBinaryComparatorFactoryProvider bcfProvider = context.getBinaryComparatorFactoryProvider();
int i = 0;
@@ -146,7 +146,7 @@
public static INormalizedKeyComputerFactory variablesToAscNormalizedKeyComputerFactory(
Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
if (nkcfProvider == null)
return null;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
index f817cd6..7409247 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
@@ -37,7 +37,8 @@
public class PlanCompiler {
private JobGenContext context;
- private Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> operatorVisitedToParents = new HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>>();
+ private Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> operatorVisitedToParents =
+ new HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>>();
public PlanCompiler(JobGenContext context) {
this.context = context;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java
index 0a4b298..4388032 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java
@@ -37,13 +37,13 @@
public class HeuristicOptimizer {
- public static PhysicalOperatorTag[] hyracksOperators = new PhysicalOperatorTag[] {
- PhysicalOperatorTag.DATASOURCE_SCAN, PhysicalOperatorTag.BTREE_SEARCH,
- PhysicalOperatorTag.EXTERNAL_GROUP_BY, PhysicalOperatorTag.HASH_GROUP_BY, PhysicalOperatorTag.HDFS_READER,
- PhysicalOperatorTag.HYBRID_HASH_JOIN, PhysicalOperatorTag.IN_MEMORY_HASH_JOIN,
- PhysicalOperatorTag.NESTED_LOOP, PhysicalOperatorTag.PRE_SORTED_DISTINCT_BY,
- PhysicalOperatorTag.PRE_CLUSTERED_GROUP_BY, PhysicalOperatorTag.REPLICATE, PhysicalOperatorTag.STABLE_SORT,
- PhysicalOperatorTag.UNION_ALL };
+ public static PhysicalOperatorTag[] hyracksOperators =
+ new PhysicalOperatorTag[] { PhysicalOperatorTag.DATASOURCE_SCAN, PhysicalOperatorTag.BTREE_SEARCH,
+ PhysicalOperatorTag.EXTERNAL_GROUP_BY, PhysicalOperatorTag.HASH_GROUP_BY,
+ PhysicalOperatorTag.HDFS_READER, PhysicalOperatorTag.HYBRID_HASH_JOIN,
+ PhysicalOperatorTag.IN_MEMORY_HASH_JOIN, PhysicalOperatorTag.NESTED_LOOP,
+ PhysicalOperatorTag.PRE_SORTED_DISTINCT_BY, PhysicalOperatorTag.PRE_CLUSTERED_GROUP_BY,
+ PhysicalOperatorTag.REPLICATE, PhysicalOperatorTag.STABLE_SORT, PhysicalOperatorTag.UNION_ALL };
public static PhysicalOperatorTag[] hyraxOperatorsBelowWhichJobGenIsDisabled = new PhysicalOperatorTag[] {};
public static boolean isHyracksOp(PhysicalOperatorTag opTag) {
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
index 10e3432..e782e4f 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
@@ -79,7 +79,7 @@
public class Node {
private final StringValue nodeId;
- private HashMap<String,AttributeValue> attributes = new HashMap<>();
+ private HashMap<String, AttributeValue> attributes = new HashMap<>();
// no instantiation
private Node(StringValue nodeId, StringValue nodeLabel) {
@@ -142,7 +142,7 @@
public class Edge {
private final Node source;
private final Node destination;
- private final HashMap<String,AttributeValue> attributes = new HashMap<>();
+ private final HashMap<String, AttributeValue> attributes = new HashMap<>();
// no instantiation
private Edge(Node source, Node destination) {
@@ -214,7 +214,7 @@
public static final class StringValue extends AttributeValue {
// no instantiation
- private StringValue (String value) {
+ private StringValue(String value) {
super(value);
}
@@ -224,7 +224,7 @@
newValue = "";
}
newValue = newValue.replace("\n", "\\n");
- return new StringValue("\"" + newValue.replace("\"","\'").trim() + "\"");
+ return new StringValue("\"" + newValue.replace("\"", "\'").trim() + "\"");
}
}
@@ -233,7 +233,7 @@
public static final Color SKYBLUE = new Color("skyblue");
// no instantiation
- private Color (String color) {
+ private Color(String color) {
super(color);
}
}
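
Note: on the StringValue.of escaping touched above, the two replace calls turn embedded newlines into a literal backslash-n and demote double quotes to single quotes before wrapping the label in quotes, which keeps generated DOT labels parseable. The same calls in isolation, on a hypothetical label:

    final class EscapeSketch {
        public static void main(String[] args) {
            String newValue = "say \"hi\"\nworld"; // hypothetical label text
            newValue = newValue.replace("\n", "\\n"); // newline -> literal \n
            String quoted = "\"" + newValue.replace("\"", "'").trim() + "\"";
            System.out.println(quoted); // "say 'hi'\nworld"
        }
    }
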
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatGenerator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatGenerator.java
index 1ea2d19..8ada0ac 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatGenerator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatGenerator.java
@@ -56,8 +56,7 @@
* @return DOT format
*/
public static String generate(final JobActivityGraph jobActivityGraph) {
- final DotFormatBuilder graphBuilder =
- new DotFormatBuilder(DotFormatBuilder.StringValue.of("JobActivityGraph"));
+ final DotFormatBuilder graphBuilder = new DotFormatBuilder(DotFormatBuilder.StringValue.of("JobActivityGraph"));
List<IConnectorDescriptor> connectors;
IActivity activity;
ActivityId fromActivityId;
@@ -154,11 +153,10 @@
* @return DOT format
*/
public static String generate(final JobSpecification jobSpecification) {
- final DotFormatBuilder graphBuilder =
- new DotFormatBuilder(DotFormatBuilder.StringValue.of("JobSpecification"));
+ final DotFormatBuilder graphBuilder = new DotFormatBuilder(DotFormatBuilder.StringValue.of("JobSpecification"));
final Map<ConnectorDescriptorId, IConnectorDescriptor> connectorMap = jobSpecification.getConnectorMap();
- final Map<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>>
- cOp = jobSpecification.getConnectorOperatorMap();
+ final Map<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> cOp =
+ jobSpecification.getConnectorOperatorMap();
ConnectorDescriptorId connectorId;
IConnectorDescriptor connector;
IOperatorDescriptor leftOperator;
@@ -168,24 +166,24 @@
String source;
String destination;
String edgeLabel;
- for (Map.Entry<ConnectorDescriptorId,
- Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> entry : cOp.entrySet()) {
+ for (Map.Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> entry : cOp
+ .entrySet()) {
connectorId = entry.getKey();
connector = connectorMap.get(connectorId);
edgeLabel = connector.getClass().getName().substring(connector.getClass().getName().lastIndexOf(".") + 1);
edgeLabel += "-" + connectorId;
leftOperator = entry.getValue().getLeft().getLeft();
rightOperator = entry.getValue().getRight().getLeft();
- source = leftOperator.getClass().getName().substring(
- leftOperator.getClass().getName().lastIndexOf(".") + 1);
- sourceNode = graphBuilder.createNode(
- DotFormatBuilder.StringValue.of(leftOperator.getOperatorId().toString()),
- DotFormatBuilder.StringValue.of(leftOperator.toString() + "-" + source));
- destination = rightOperator.getClass().getName().substring(
- rightOperator.getClass().getName().lastIndexOf(".") + 1);
- destinationNode = graphBuilder.createNode(
- DotFormatBuilder.StringValue.of(rightOperator.getOperatorId().toString()),
- DotFormatBuilder.StringValue.of(rightOperator.toString() + "-" + destination));
+ source = leftOperator.getClass().getName()
+ .substring(leftOperator.getClass().getName().lastIndexOf(".") + 1);
+ sourceNode =
+ graphBuilder.createNode(DotFormatBuilder.StringValue.of(leftOperator.getOperatorId().toString()),
+ DotFormatBuilder.StringValue.of(leftOperator.toString() + "-" + source));
+ destination = rightOperator.getClass().getName()
+ .substring(rightOperator.getClass().getName().lastIndexOf(".") + 1);
+ destinationNode =
+ graphBuilder.createNode(DotFormatBuilder.StringValue.of(rightOperator.getOperatorId().toString()),
+ DotFormatBuilder.StringValue.of(rightOperator.toString() + "-" + destination));
graphBuilder.createEdge(sourceNode, destinationNode).setLabel(DotFormatBuilder.StringValue.of(edgeLabel));
}
@@ -209,17 +207,16 @@
}
public static void generateNode(DotFormatBuilder dotBuilder, ILogicalOperator op,
- LogicalOperatorDotVisitor dotVisitor, Set<ILogicalOperator> operatorsVisited)
- throws AlgebricksException {
+ LogicalOperatorDotVisitor dotVisitor, Set<ILogicalOperator> operatorsVisited) throws AlgebricksException {
DotFormatBuilder.StringValue destinationNodeLabel = formatStringOf(op, dotVisitor);
- DotFormatBuilder.Node destinationNode = dotBuilder.createNode(DotFormatBuilder.StringValue.of(
- Integer.toString(op.hashCode())), destinationNodeLabel);
+ DotFormatBuilder.Node destinationNode = dotBuilder
+ .createNode(DotFormatBuilder.StringValue.of(Integer.toString(op.hashCode())), destinationNodeLabel);
DotFormatBuilder.StringValue sourceNodeLabel;
DotFormatBuilder.Node sourceNode;
for (Mutable<ILogicalOperator> child : op.getInputs()) {
sourceNodeLabel = formatStringOf(child.getValue(), dotVisitor);
- sourceNode = dotBuilder.createNode(DotFormatBuilder.StringValue.of(
- Integer.toString(child.getValue().hashCode())), sourceNodeLabel);
+ sourceNode = dotBuilder.createNode(
+ DotFormatBuilder.StringValue.of(Integer.toString(child.getValue().hashCode())), sourceNodeLabel);
dotBuilder.createEdge(sourceNode, destinationNode);
if (!operatorsVisited.contains(child.getValue())) {
generateNode(dotBuilder, child.getValue(), dotVisitor, operatorsVisited);
@@ -230,10 +227,9 @@
for (ILogicalPlan nestedPlan : ((AbstractOperatorWithNestedPlans) op).getNestedPlans()) {
nestedOperator = nestedPlan.getRoots().get(0).getValue();
sourceNodeLabel = formatStringOf(nestedOperator, dotVisitor);
- sourceNode = dotBuilder.createNode(DotFormatBuilder.StringValue.of(
- Integer.toString(nestedOperator.hashCode())), sourceNodeLabel);
- dotBuilder.createEdge(sourceNode, destinationNode).
- setLabel(DotFormatBuilder.StringValue.of("subplan"));
+ sourceNode = dotBuilder.createNode(
+ DotFormatBuilder.StringValue.of(Integer.toString(nestedOperator.hashCode())), sourceNodeLabel);
+ dotBuilder.createEdge(sourceNode, destinationNode).setLabel(DotFormatBuilder.StringValue.of("subplan"));
if (!operatorsVisited.contains(nestedOperator)) {
generateNode(dotBuilder, nestedOperator, dotVisitor, operatorsVisited);
}
@@ -251,8 +247,9 @@
for (int i = 0; i < replicateOperator.getOutputs().size(); i++) {
replicateOutput = replicateOperator.getOutputs().get(i).getValue();
destinationNodeLabel = formatStringOf(replicateOutput, dotVisitor);
- destinationNode = dotBuilder.createNode(DotFormatBuilder.StringValue.of(
- Integer.toString(replicateOutput.hashCode())), destinationNodeLabel);
+ destinationNode = dotBuilder.createNode(
+ DotFormatBuilder.StringValue.of(Integer.toString(replicateOutput.hashCode())),
+ destinationNodeLabel);
if (replicateOperator.getOutputMaterializationFlags()[i]) {
dotBuilder.createEdge(sourceNode, destinationNode).setColor(DotFormatBuilder.Color.RED);
} else {
@@ -267,7 +264,7 @@
private static DotFormatBuilder.StringValue formatStringOf(ILogicalOperator operator,
LogicalOperatorDotVisitor dotVisitor) throws AlgebricksException {
String formattedString = operator.accept(dotVisitor, null).trim();
- IPhysicalOperator physicalOperator = ((AbstractLogicalOperator)operator).getPhysicalOperator();
+ IPhysicalOperator physicalOperator = ((AbstractLogicalOperator) operator).getPhysicalOperator();
if (physicalOperator != null) {
formattedString += "\\n" + physicalOperator.toString().trim() + " |" + operator.getExecutionMode() + "|";
} else {
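
Note: the generateNode edits above reshape a depth-first traversal: an edge is emitted for every (input, consumer) pair, but recursion is guarded by operatorsVisited so shared inputs and nested plans are expanded only once. The traversal skeleton, reduced to strings (a sketch of the shape only, not the DotFormatBuilder API):

    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    final class DfsSketch {
        static void generate(String op, Map<String, List<String>> inputs, Set<String> visited) {
            for (String child : inputs.getOrDefault(op, List.of())) {
                System.out.println(child + " -> " + op); // edge from input to consumer
                if (visited.add(child)) { // recurse only on first visit
                    generate(child, inputs, visited);
                }
            }
        }

        public static void main(String[] args) {
            // 'join' consumes 'scan' twice; the edge prints twice, the node expands once.
            Map<String, List<String>> inputs = Map.of("join", List.of("scan", "scan"));
            generate("join", inputs, new HashSet<>(Set.of("join")));
        }
    }
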
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/LogicalOperatorDotVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/LogicalOperatorDotVisitor.java
index a54ff63..4649d6d 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/LogicalOperatorDotVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/LogicalOperatorDotVisitor.java
@@ -350,7 +350,7 @@
@Override
public String visitScriptOperator(ScriptOperator op, Void noArgs) throws AlgebricksException {
stringBuilder.setLength(0);
- stringBuilder.append("script (in: ").append(op.getInputVariables()).append(") (out: " )
+ stringBuilder.append("script (in: ").append(op.getInputVariables()).append(") (out: ")
.append(op.getOutputVariables()).append(")");
return stringBuilder.toString();
}
diff --git a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java b/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
index 0992489..93dd3d5 100644
--- a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
+++ b/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
@@ -24,6 +24,5 @@
public interface IBinaryHashFunctionFamilyProvider {
- public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type)
- throws AlgebricksException;
+ public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) throws AlgebricksException;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/utils/WriteValueTools.java b/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/utils/WriteValueTools.java
index 97e7d95..ba27c4e 100644
--- a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/utils/WriteValueTools.java
+++ b/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/utils/WriteValueTools.java
@@ -26,10 +26,10 @@
public final class WriteValueTools {
- private final static int[] INT_INTERVALS = { 9, 99, 999, 9999, 99999, 999999, 9999999, 99999999, 999999999,
- Integer.MAX_VALUE };
- private final static int[] INT_DIVIDERS = { 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000,
- 1000000000 };
+ private final static int[] INT_INTERVALS =
+ { 9, 99, 999, 9999, 99999, 999999, 9999999, 99999999, 999999999, Integer.MAX_VALUE };
+ private final static int[] INT_DIVIDERS =
+ { 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000 };
private final static int[] DIGITS = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' };
public static void writeInt(int i, OutputStream os) throws IOException {
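
Note: the two re-wrapped tables exist so writeInt can emit decimal digits straight to the stream: INT_INTERVALS finds the digit count without Math.log or String allocation, and INT_DIVIDERS isolates each digit. The body of writeInt lies outside this hunk, so the following is an assumed reconstruction of the technique, not the actual method:

    import java.io.IOException;
    import java.io.OutputStream;

    final class WriteIntSketch {
        private static final int[] INT_INTERVALS =
                { 9, 99, 999, 9999, 99999, 999999, 9999999, 99999999, 999999999, Integer.MAX_VALUE };
        private static final int[] INT_DIVIDERS =
                { 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000 };

        // Handles non-negative ints only; the real method presumably also deals with signs.
        static void writeInt(int i, OutputStream os) throws IOException {
            int k = 0;
            while (i > INT_INTERVALS[k]) { // k ends as (digit count - 1)
                k++;
            }
            for (int d = k - 1; d >= 0; d--) {
                os.write('0' + (i / INT_DIVIDERS[d]) % 10); // most significant digits first
            }
            os.write('0' + i % 10); // least significant digit
        }

        public static void main(String[] args) throws IOException {
            writeInt(90537, System.out); // prints 90537
        }
    }
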
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java
index 2870074..3f61cc0 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java
@@ -65,9 +65,8 @@
}
}
- protected Pair<Boolean, Mutable<ILogicalOperator>> tryToPushAgg(AggregateOperator initAgg,
- GroupByOperator newGbyOp, Set<SimilarAggregatesInfo> toReplaceSet, IOptimizationContext context)
- throws AlgebricksException {
+ protected Pair<Boolean, Mutable<ILogicalOperator>> tryToPushAgg(AggregateOperator initAgg, GroupByOperator newGbyOp,
+ Set<SimilarAggregatesInfo> toReplaceSet, IOptimizationContext context) throws AlgebricksException {
List<LogicalVariable> initVars = initAgg.getVariables();
List<Mutable<ILogicalExpression>> initExprs = initAgg.getExpressions();
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceGroupByCombinerRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceGroupByCombinerRule.java
index ed4196b..a921301 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceGroupByCombinerRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceGroupByCombinerRule.java
@@ -212,7 +212,7 @@
private Pair<Boolean, ILogicalPlan> tryToPushSubplan(ILogicalPlan nestedPlan, GroupByOperator oldGbyOp,
GroupByOperator newGbyOp, BookkeepingInfo bi, List<LogicalVariable> gbyVars, IOptimizationContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
List<Mutable<ILogicalOperator>> pushedRoots = new ArrayList<Mutable<ILogicalOperator>>();
Set<SimilarAggregatesInfo> toReplaceSet = new HashSet<SimilarAggregatesInfo>();
for (Mutable<ILogicalOperator> r : nestedPlan.getRoots()) {
@@ -272,7 +272,7 @@
private boolean tryToPushRoot(Mutable<ILogicalOperator> root, GroupByOperator oldGbyOp, GroupByOperator newGbyOp,
BookkeepingInfo bi, List<LogicalVariable> gbyVars, IOptimizationContext context,
List<Mutable<ILogicalOperator>> toPushAccumulate, Set<SimilarAggregatesInfo> toReplaceSet)
- throws AlgebricksException {
+ throws AlgebricksException {
AbstractLogicalOperator op1 = (AbstractLogicalOperator) root.getValue();
if (op1.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
return false;
@@ -399,8 +399,7 @@
* @return the bottom-most reference of a select operator
*/
private Mutable<ILogicalOperator> findBottomOpRefStayInOldGby(GroupByOperator nestedGby,
- Mutable<ILogicalOperator> currentOpRef)
- throws AlgebricksException {
+ Mutable<ILogicalOperator> currentOpRef) throws AlgebricksException {
Set<LogicalVariable> usedVarsInNestedGby = new HashSet<>();
// Collects used variables in nested pipelines.
for (ILogicalPlan nestedPlan : nestedGby.getNestedPlans()) {
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/BreakSelectIntoConjunctsRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/BreakSelectIntoConjunctsRule.java
index f5bec22..d975cce 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/BreakSelectIntoConjunctsRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/BreakSelectIntoConjunctsRule.java
@@ -43,7 +43,8 @@
}
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
return false;
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
index 2ab8520..fa35a98 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
@@ -115,8 +115,8 @@
outerRoot = buildOperatorChain(outerOps, null, context);
context.computeAndSetTypeEnvironmentForOperator(outerRoot);
- InnerJoinOperator product = new InnerJoinOperator(
- new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
+ InnerJoinOperator product =
+ new InnerJoinOperator(new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
// Outer branch.
product.getInputs().add(new MutableObject<ILogicalOperator>(outerRoot));
// Inner branch.
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/CopyLimitDownRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/CopyLimitDownRule.java
index 372af26..53548e4 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/CopyLimitDownRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/CopyLimitDownRule.java
@@ -91,8 +91,8 @@
// Need to add an offset to the given limit value
// since the original topmost limit will use the offset value.
// We can't apply the offset multiple times.
- IFunctionInfo finfoAdd = context.getMetadataProvider()
- .lookupFunction(AlgebricksBuiltinFunctions.NUMERIC_ADD);
+ IFunctionInfo finfoAdd =
+ context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NUMERIC_ADD);
List<Mutable<ILogicalExpression>> addArgs = new ArrayList<>();
addArgs.add(
new MutableObject<ILogicalExpression>(limitOp.getMaxObjects().getValue().cloneExpression()));
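
Note: the comment in this hunk deserves a concrete instance: if the topmost operator is LIMIT 10 OFFSET 5, a copy pushed below another operator must not reapply the offset, so it is rewritten to request maxObjects + offset rows. The arithmetic the NUMERIC_ADD expression encodes:

    final class CopyLimitDownSketch {
        public static void main(String[] args) {
            int maxObjects = 10; // LIMIT 10 ...
            int offset = 5; // ... OFFSET 5, applied only by the topmost limit
            // The pushed-down copy must deliver enough rows for the top to skip 5 and keep 10.
            System.out.println(maxObjects + offset); // 15
        }
    }
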
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceOrderByAfterSubplan.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceOrderByAfterSubplan.java
index fcec50a..baad59b 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceOrderByAfterSubplan.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceOrderByAfterSubplan.java
@@ -64,7 +64,8 @@
}
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -108,7 +109,7 @@
foundTarget = false;
break;
}
- if(child.getOperatorTag() == LogicalOperatorTag.GROUP){
+ if (child.getOperatorTag() == LogicalOperatorTag.GROUP) {
foundTarget = false;
break;
}
@@ -137,8 +138,8 @@
return false;
}
}
- List<Pair<IOrder, Mutable<ILogicalExpression>>> orderExprs = deepCopyOrderAndExpression(sourceOrderOp
- .getOrderExpressions());
+ List<Pair<IOrder, Mutable<ILogicalExpression>>> orderExprs =
+ deepCopyOrderAndExpression(sourceOrderOp.getOrderExpressions());
OrderOperator newOrderOp = new OrderOperator(orderExprs);
context.addToDontApplySet(this, newOrderOp);
inputs.set(i, new MutableObject<ILogicalOperator>(newOrderOp));
@@ -155,15 +156,17 @@
}
private Mutable<ILogicalExpression> deepCopyExpressionRef(Mutable<ILogicalExpression> oldExpr) {
- return new MutableObject<ILogicalExpression>(((AbstractLogicalExpression) oldExpr.getValue()).cloneExpression());
+ return new MutableObject<ILogicalExpression>(
+ ((AbstractLogicalExpression) oldExpr.getValue()).cloneExpression());
}
private List<Pair<IOrder, Mutable<ILogicalExpression>>> deepCopyOrderAndExpression(
List<Pair<IOrder, Mutable<ILogicalExpression>>> ordersAndExprs) {
- List<Pair<IOrder, Mutable<ILogicalExpression>>> newOrdersAndExprs = new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>();
+ List<Pair<IOrder, Mutable<ILogicalExpression>>> newOrdersAndExprs =
+ new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>();
for (Pair<IOrder, Mutable<ILogicalExpression>> pair : ordersAndExprs)
- newOrdersAndExprs.add(new Pair<IOrder, Mutable<ILogicalExpression>>(pair.first,
- deepCopyExpressionRef(pair.second)));
+ newOrdersAndExprs
+ .add(new Pair<IOrder, Mutable<ILogicalExpression>>(pair.first, deepCopyExpressionRef(pair.second)));
return newOrdersAndExprs;
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java
index 6763e2b..da0466e 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java
@@ -149,8 +149,8 @@
}
AbstractLogicalOperator op = (AbstractLogicalOperator) root.getValue();
op.computeDeliveredPhysicalProperties(context);
- AlgebricksConfig.ALGEBRICKS_LOGGER.trace(">>>> Structural properties for " + op.getPhysicalOperator()
- + ": " + op.getDeliveredPhysicalProperties() + "\n");
+ AlgebricksConfig.ALGEBRICKS_LOGGER.trace(">>>> Structural properties for " + op.getPhysicalOperator() + ": "
+ + op.getDeliveredPhysicalProperties() + "\n");
}
return changed;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
index f51c9ea..b95d6e4 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
@@ -89,7 +89,8 @@
private final List<ILogicalExpression> originalAssignExprs = new ArrayList<ILogicalExpression>();
private final CommonExpressionSubstitutionVisitor substVisitor = new CommonExpressionSubstitutionVisitor();
- private final Map<ILogicalExpression, ExprEquivalenceClass> exprEqClassMap = new HashMap<ILogicalExpression, ExprEquivalenceClass>();
+ private final Map<ILogicalExpression, ExprEquivalenceClass> exprEqClassMap =
+ new HashMap<ILogicalExpression, ExprEquivalenceClass>();
// Set of operators for which common subexpression elimination should not be performed.
private static final Set<LogicalOperatorTag> ignoreOps = new HashSet<LogicalOperatorTag>(6);
@@ -310,8 +311,8 @@
return false;
}
// Place a Select operator beneath op that contains the enclosing expression.
- SelectOperator selectOp = new SelectOperator(new MutableObject<ILogicalExpression>(enclosingExpr),
- false, null);
+ SelectOperator selectOp =
+ new SelectOperator(new MutableObject<ILogicalExpression>(enclosingExpr), false, null);
selectOp.getInputs().add(new MutableObject<ILogicalOperator>(op.getInputs().get(0).getValue()));
op.getInputs().get(0).setValue(selectOp);
// Set firstOp to be the select below op, since we want to assign the common subexpr there.
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonOperatorsRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonOperatorsRule.java
index 5a4cacd..923ffb5 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonOperatorsRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonOperatorsRule.java
@@ -52,8 +52,8 @@
public class ExtractCommonOperatorsRule implements IAlgebraicRewriteRule {
- private final HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> childrenToParents
- = new HashMap<>();
+ private final HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> childrenToParents =
+ new HashMap<>();
private final List<Mutable<ILogicalOperator>> roots = new ArrayList<>();
private final List<List<Mutable<ILogicalOperator>>> equivalenceClasses = new ArrayList<>();
private final HashMap<Mutable<ILogicalOperator>, BitSet> opToCandidateInputs = new HashMap<>();
@@ -210,7 +210,8 @@
continue;
}
ArrayList<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
- Map<LogicalVariable, LogicalVariable> variableMappingBack = new HashMap<LogicalVariable, LogicalVariable>();
+ Map<LogicalVariable, LogicalVariable> variableMappingBack =
+ new HashMap<LogicalVariable, LogicalVariable>();
IsomorphismUtilities.mapVariablesTopDown(ref.getValue(), candidate.getValue(), variableMappingBack);
for (int i = 0; i < liveVarsNew.size(); i++) {
liveVars.add(variableMappingBack.get(liveVarsNew.get(i)));
@@ -240,8 +241,8 @@
for (Mutable<ILogicalOperator> parentOpRef : parentOpList) {
AbstractLogicalOperator parentOp = (AbstractLogicalOperator) parentOpRef.getValue();
int index = parentOp.getInputs().indexOf(ref);
- ILogicalOperator childOp = parentOp.getOperatorTag() == LogicalOperatorTag.PROJECT ? assignOperator
- : projectOperator;
+ ILogicalOperator childOp =
+ parentOp.getOperatorTag() == LogicalOperatorTag.PROJECT ? assignOperator : projectOperator;
if (!HeuristicOptimizer.isHyracksOp(parentOp.getPhysicalOperator().getOperatorTag())) {
parentOp.getInputs().set(index, new MutableObject<ILogicalOperator>(childOp));
} else {
@@ -263,7 +264,8 @@
}
private void genCandidates(IOptimizationContext context) throws AlgebricksException {
- List<List<Mutable<ILogicalOperator>>> previousEquivalenceClasses = new ArrayList<List<Mutable<ILogicalOperator>>>();
+ List<List<Mutable<ILogicalOperator>>> previousEquivalenceClasses =
+ new ArrayList<List<Mutable<ILogicalOperator>>>();
while (equivalenceClasses.size() > 0) {
previousEquivalenceClasses.clear();
for (List<Mutable<ILogicalOperator>> candidates : equivalenceClasses) {
@@ -364,7 +366,8 @@
}
private void prune(IOptimizationContext context) throws AlgebricksException {
- List<List<Mutable<ILogicalOperator>>> previousEquivalenceClasses = new ArrayList<List<Mutable<ILogicalOperator>>>();
+ List<List<Mutable<ILogicalOperator>>> previousEquivalenceClasses =
+ new ArrayList<List<Mutable<ILogicalOperator>>>();
for (List<Mutable<ILogicalOperator>> candidates : equivalenceClasses) {
List<Mutable<ILogicalOperator>> candidatesCopy = new ArrayList<Mutable<ILogicalOperator>>();
candidatesCopy.addAll(candidates);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractFunctionsFromJoinConditionRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractFunctionsFromJoinConditionRule.java
index 71a00bf..198510a 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractFunctionsFromJoinConditionRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractFunctionsFromJoinConditionRule.java
@@ -63,7 +63,8 @@
public class ExtractFunctionsFromJoinConditionRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -104,8 +105,8 @@
for (Mutable<ILogicalExpression> exprRef : fexp.getArguments()) {
if (exprRef.getValue().getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
LogicalVariable newVar = context.newVar();
- AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(exprRef
- .getValue().cloneExpression()));
+ AssignOperator newAssign = new AssignOperator(newVar,
+ new MutableObject<ILogicalExpression>(exprRef.getValue().cloneExpression()));
newAssign.setExecutionMode(joinOp.getExecutionMode());
// Place assign below joinOp.
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractGroupByDecorVariablesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractGroupByDecorVariablesRule.java
index a2ad732..05cc7b6 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractGroupByDecorVariablesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractGroupByDecorVariablesRule.java
@@ -66,7 +66,7 @@
for (Pair<LogicalVariable, Mutable<ILogicalExpression>> decorVarExpr : decorList) {
Mutable<ILogicalExpression> exprRef = decorVarExpr.second;
ILogicalExpression expr = exprRef.getValue();
- if (expr ==null || expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+ if (expr == null || expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
continue;
}
// Rewrites the decoration entry if the decoration expression is not a variable reference expression.
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/FactorRedundantGroupAndDecorVarsRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/FactorRedundantGroupAndDecorVarsRule.java
index 38776c6..2f28a84 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/FactorRedundantGroupAndDecorVarsRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/FactorRedundantGroupAndDecorVarsRule.java
@@ -43,7 +43,8 @@
public class FactorRedundantGroupAndDecorVarsRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -77,8 +78,8 @@
LogicalVariable lhs = varRhsToLhs.get(v);
if (lhs != null) {
if (p.first != null) {
- AssignOperator assign = new AssignOperator(p.first, new MutableObject<ILogicalExpression>(
- new VariableReferenceExpression(lhs)));
+ AssignOperator assign = new AssignOperator(p.first,
+ new MutableObject<ILogicalExpression>(new VariableReferenceExpression(lhs)));
ILogicalOperator op = opRef.getValue();
assign.getInputs().add(new MutableObject<ILogicalOperator>(op));
opRef.setValue(assign);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InferTypesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InferTypesRule.java
index a61b1a2..8d54a67 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InferTypesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InferTypesRule.java
@@ -28,7 +28,8 @@
public class InferTypesRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
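InferTypesRule is the smallest instance of a pattern that recurs throughout this change: `rewritePre` is a deliberate no-op and the rule's work happens in the post-visit. A self-contained skeleton of that pattern; the class name is hypothetical and the imports assume the usual algebricks-core and algebricks-common packages seen elsewhere in this diff:

    import org.apache.commons.lang3.mutable.Mutable;
    import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
    import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
    import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
    import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;

    // Hypothetical example, not part of this commit.
    public class ExampleNoOpPreRule implements IAlgebraicRewriteRule {
        @Override
        public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
                throws AlgebricksException {
            return false; // top-down pass: nothing to do for this rule
        }

        @Override
        public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
                throws AlgebricksException {
            return false; // bottom-up pass: the actual rewrite would live here
        }
    }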
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java
index 71fde61..2c825b7 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java
@@ -51,7 +51,8 @@
public class InlineSingleReferenceVariablesRule extends InlineVariablesRule {
// Maps from variable to a list of operators using that variable.
- protected Map<LogicalVariable, List<ILogicalOperator>> usedVarsMap = new HashMap<LogicalVariable, List<ILogicalOperator>>();
+ protected Map<LogicalVariable, List<ILogicalOperator>> usedVarsMap =
+ new HashMap<LogicalVariable, List<ILogicalOperator>>();
protected List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
@Override
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/IntroJoinInsideSubplanRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/IntroJoinInsideSubplanRule.java
index 641ccfe..081f199 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/IntroJoinInsideSubplanRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/IntroJoinInsideSubplanRule.java
@@ -85,12 +85,12 @@
OperatorPropertiesUtil.getFreeVariablesInSelfOrDesc(op1, free1);
if (!free1.isEmpty()) {
OperatorManipulationUtil.ntsToEts(op2Ref, context);
- NestedTupleSourceOperator nts = new NestedTupleSourceOperator(
- new MutableObject<ILogicalOperator>(subplan));
+ NestedTupleSourceOperator nts =
+ new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(subplan));
Mutable<ILogicalOperator> ntsRef = new MutableObject<ILogicalOperator>(nts);
Mutable<ILogicalOperator> innerRef = new MutableObject<ILogicalOperator>(op2);
- InnerJoinOperator join = new InnerJoinOperator(new MutableObject<ILogicalExpression>(
- ConstantExpression.TRUE), ntsRef, innerRef);
+ InnerJoinOperator join = new InnerJoinOperator(
+ new MutableObject<ILogicalExpression>(ConstantExpression.TRUE), ntsRef, innerRef);
op2Ref.setValue(join);
context.computeAndSetTypeEnvironmentForOperator(nts);
context.computeAndSetTypeEnvironmentForOperator(join);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java
index da85e0d..a5cc573 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java
@@ -43,7 +43,8 @@
public class PullSelectOutOfEqJoin implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java
index bbb01dd..43c58e2 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java
@@ -110,8 +110,8 @@
for (int j = 0; j < assignVars.size(); j++) {
LogicalVariable first = newAssignOps[0].getVariables().get(j);
LogicalVariable second = newAssignOps[1].getVariables().get(j);
- Triple<LogicalVariable, LogicalVariable, LogicalVariable> varMapping = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(
- first, second, assignVars.get(j));
+ Triple<LogicalVariable, LogicalVariable, LogicalVariable> varMapping =
+ new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(first, second, assignVars.get(j));
unionOp.getVariableMappings().add(varMapping);
}
context.computeAndSetTypeEnvironmentForOperator(unionOp);
@@ -127,7 +127,7 @@
private AssignOperator createAssignBelowUnionAllBranch(UnionAllOperator unionOp, int inputIndex,
AssignOperator originalAssignOp, Set<LogicalVariable> assignUsedVars, IOptimizationContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
AssignOperator newAssignOp = cloneAssignOperator(originalAssignOp, context);
newAssignOp.getInputs()
.add(new MutableObject<ILogicalOperator>(unionOp.getInputs().get(inputIndex).getValue()));
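The Triple rewrapped above encodes UNION-ALL's variable mapping: one variable produced by each input branch plus the single output variable they merge into. Spelled out with illustrative names; `newAssignOps`, `assignVars`, `j`, and `unionOp` are assumed from the surrounding loop:

    // Hedged sketch: one mapping entry per output column of the union.
    LogicalVariable fromLeftBranch = newAssignOps[0].getVariables().get(j);
    LogicalVariable fromRightBranch = newAssignOps[1].getVariables().get(j);
    LogicalVariable unionOutput = assignVars.get(j);
    unionOp.getVariableMappings().add(new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(
            fromLeftBranch, fromRightBranch, unionOutput));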
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignDownThroughProductRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignDownThroughProductRule.java
index f3f0e02..edd7e23 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignDownThroughProductRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignDownThroughProductRule.java
@@ -37,7 +37,8 @@
public class PushAssignDownThroughProductRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java
index c04a9d5..d7090d2 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java
@@ -84,7 +84,8 @@
}
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushGroupByIntoSortRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushGroupByIntoSortRule.java
index 7ea1327..192e318 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushGroupByIntoSortRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushGroupByIntoSortRule.java
@@ -46,7 +46,8 @@
public class PushGroupByIntoSortRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -67,8 +68,8 @@
Mutable<ILogicalOperator> op2Ref = op.getInputs().get(0).getValue().getInputs().get(0);
AbstractLogicalOperator op2 = (AbstractLogicalOperator) op2Ref.getValue();
if (op2.getPhysicalOperator().getOperatorTag() == PhysicalOperatorTag.STABLE_SORT) {
- AbstractStableSortPOperator sortPhysicalOperator = (AbstractStableSortPOperator) op2
- .getPhysicalOperator();
+ AbstractStableSortPOperator sortPhysicalOperator =
+ (AbstractStableSortPOperator) op2.getPhysicalOperator();
if (groupByOperator.getNestedPlans().size() != 1) {
//Sort group-by currently works only for one nested plan with one root containing
//an aggregate and a nested-tuple-source.
@@ -88,13 +89,14 @@
continue;
}
AggregateOperator aggOp = (AggregateOperator) r0.getValue();
- AbstractLogicalOperator aggInputOp = (AbstractLogicalOperator) aggOp.getInputs().get(0)
- .getValue();
+ AbstractLogicalOperator aggInputOp =
+ (AbstractLogicalOperator) aggOp.getInputs().get(0).getValue();
if (aggInputOp.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
continue;
}
- boolean hasIntermediateAggregate = generateMergeAggregationExpressions(groupByOperator, context);
+ boolean hasIntermediateAggregate =
+ generateMergeAggregationExpressions(groupByOperator, context);
if (!hasIntermediateAggregate) {
continue;
}
@@ -132,8 +134,8 @@
"External/sort group-by currently works only for one nested plan with one root containing"
+ "an aggregate and a nested-tuple-source.");
}
- IMergeAggregationExpressionFactory mergeAggregationExpressionFactory = context
- .getMergeAggregationExpressionFactory();
+ IMergeAggregationExpressionFactory mergeAggregationExpressionFactory =
+ context.getMergeAggregationExpressionFactory();
Mutable<ILogicalOperator> r0 = p0.getRoots().get(0);
AggregateOperator aggOp = (AggregateOperator) r0.getValue();
List<Mutable<ILogicalExpression>> aggFuncRefs = aggOp.getExpressions();
@@ -141,8 +143,8 @@
int n = aggOp.getExpressions().size();
List<Mutable<ILogicalExpression>> mergeExpressionRefs = new ArrayList<Mutable<ILogicalExpression>>();
for (int i = 0; i < n; i++) {
- ILogicalExpression mergeExpr = mergeAggregationExpressionFactory.createMergeAggregation(
- originalAggVars.get(i), aggFuncRefs.get(i).getValue(), context);
+ ILogicalExpression mergeExpr = mergeAggregationExpressionFactory
+ .createMergeAggregation(originalAggVars.get(i), aggFuncRefs.get(i).getValue(), context);
if (mergeExpr == null) {
return false;
}
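The loop at the end of this file's changes is an all-or-nothing check: sort group-by is usable only if every aggregate function has a merge counterpart. Condensed below, assuming the locals named in the hunk (`aggOp`, `originalAggVars`, `aggFuncRefs`, `mergeExpressionRefs`, `context`) are in scope:

    // Hedged sketch of the merge-aggregate feasibility check.
    IMergeAggregationExpressionFactory factory = context.getMergeAggregationExpressionFactory();
    for (int i = 0; i < aggOp.getExpressions().size(); i++) {
        ILogicalExpression mergeExpr =
                factory.createMergeAggregation(originalAggVars.get(i), aggFuncRefs.get(i).getValue(), context);
        if (mergeExpr == null) {
            return false; // a single aggregate without a merge form disables the rewrite
        }
        mergeExpressionRefs.add(new MutableObject<ILogicalExpression>(mergeExpr));
    }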
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushMapOperatorDownThroughProductRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushMapOperatorDownThroughProductRule.java
index f71af5a..3181459 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushMapOperatorDownThroughProductRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushMapOperatorDownThroughProductRule.java
@@ -53,7 +53,7 @@
if (!OperatorPropertiesUtil.isMovable(op1)) {
return false;
- };
+ }
Mutable<ILogicalOperator> op2Ref = op1.getInputs().get(0);
AbstractLogicalOperator op2 = (AbstractLogicalOperator) op2Ref.getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushNestedOrderByUnderPreSortedGroupByRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushNestedOrderByUnderPreSortedGroupByRule.java
index fc7b98e..aa58985 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushNestedOrderByUnderPreSortedGroupByRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushNestedOrderByUnderPreSortedGroupByRule.java
@@ -45,7 +45,8 @@
public class PushNestedOrderByUnderPreSortedGroupByRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushProjectIntoDataSourceScanRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushProjectIntoDataSourceScanRule.java
index e05619f..281093a 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushProjectIntoDataSourceScanRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushProjectIntoDataSourceScanRule.java
@@ -32,7 +32,8 @@
public class PushProjectIntoDataSourceScanRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushSubplanWithAggregateDownThroughProductRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushSubplanWithAggregateDownThroughProductRule.java
index 54c5728..d135846 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushSubplanWithAggregateDownThroughProductRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushSubplanWithAggregateDownThroughProductRule.java
@@ -41,7 +41,8 @@
public class PushSubplanWithAggregateDownThroughProductRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughProductRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughProductRule.java
index 2da1343..4c2c4da 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughProductRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughProductRule.java
@@ -37,7 +37,8 @@
public class PushUnnestDownThroughProductRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughUnionRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughUnionRule.java
index b6556e9..3ef37cd 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughUnionRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughUnionRule.java
@@ -79,11 +79,11 @@
}
LogicalVariable unnestVar1 = context.newVar();
- UnnestOperator unnest1 = new UnnestOperator(unnestVar1, new MutableObject<ILogicalExpression>(unnestOpRef
- .getExpressionRef().getValue().cloneExpression()));
+ UnnestOperator unnest1 = new UnnestOperator(unnestVar1,
+ new MutableObject<ILogicalExpression>(unnestOpRef.getExpressionRef().getValue().cloneExpression()));
LogicalVariable unnestVar2 = context.newVar();
- UnnestOperator unnest2 = new UnnestOperator(unnestVar2, new MutableObject<ILogicalExpression>(unnestOpRef
- .getExpressionRef().getValue().cloneExpression()));
+ UnnestOperator unnest2 = new UnnestOperator(unnestVar2,
+ new MutableObject<ILogicalExpression>(unnestOpRef.getExpressionRef().getValue().cloneExpression()));
//Getting the two topmost branches and adding them as inputs to the unnests:
Mutable<ILogicalOperator> branch1 = unionAbstractOp.getInputs().get(0);
@@ -110,10 +110,11 @@
context.computeAndSetTypeEnvironmentForOperator(unnest2);
//creating a new union operator with the updated logical variables
- List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>(
- 1);
- Triple<LogicalVariable, LogicalVariable, LogicalVariable> union_triple_vars = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(
- unnestVar1, unnestVar2, unnestOpRef.getVariables().get(0));
+ List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap =
+ new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>(1);
+ Triple<LogicalVariable, LogicalVariable, LogicalVariable> union_triple_vars =
+ new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(unnestVar1, unnestVar2,
+ unnestOpRef.getVariables().get(0));
varMap.add(union_triple_vars);
UnionAllOperator unionOpFinal = new UnionAllOperator(varMap);
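Taken together, the two hunks above clone the unnest once per union branch, then map both clones back to the original unnest variable through a fresh UNION-ALL. The essence, condensed with the hunks' locals assumed in scope:

    // Hedged sketch: one unnest clone per branch, re-merged by a new union.
    LogicalVariable v1 = context.newVar();
    UnnestOperator u1 = new UnnestOperator(v1,
            new MutableObject<ILogicalExpression>(unnestOpRef.getExpressionRef().getValue().cloneExpression()));
    LogicalVariable v2 = context.newVar();
    UnnestOperator u2 = new UnnestOperator(v2,
            new MutableObject<ILogicalExpression>(unnestOpRef.getExpressionRef().getValue().cloneExpression()));
    List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap =
            new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>(1);
    varMap.add(new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(v1, v2,
            unnestOpRef.getVariables().get(0)));
    UnionAllOperator mergedAgain = new UnionAllOperator(varMap);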
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ReinferAllTypesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ReinferAllTypesRule.java
index b43363a..bf649ab 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ReinferAllTypesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ReinferAllTypesRule.java
@@ -37,7 +37,8 @@
}
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
if (context.checkIfInDontApplySet(this, opRef.getValue())) {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantProjectionRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantProjectionRule.java
index 38e97d7..fbaab78 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantProjectionRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantProjectionRule.java
@@ -53,7 +53,8 @@
}
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
if (op1.getOperatorTag() == LogicalOperatorTag.PROJECT) {
Mutable<ILogicalOperator> opRef2 = op1.getInputs().get(0);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
index 2f0913b..5386193 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
@@ -69,7 +69,8 @@
public class RemoveRedundantVariablesRule implements IAlgebraicRewriteRule {
private final VariableSubstitutionVisitor substVisitor = new VariableSubstitutionVisitor();
- private final Map<LogicalVariable, List<LogicalVariable>> equivalentVarsMap = new HashMap<LogicalVariable, List<LogicalVariable>>();
+ private final Map<LogicalVariable, List<LogicalVariable>> equivalentVarsMap =
+ new HashMap<LogicalVariable, List<LogicalVariable>>();
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnnecessarySortMergeExchange.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnnecessarySortMergeExchange.java
index 84d7c9d..e42c067 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnnecessarySortMergeExchange.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnnecessarySortMergeExchange.java
@@ -110,8 +110,8 @@
for (OrderColumn oc : sme.getSortColumns()) {
ocList.add(oc);
}
- HashPartitionMergeExchangePOperator hpme = new HashPartitionMergeExchangePOperator(ocList,
- hpe.getHashFields(), hpe.getDomain());
+ HashPartitionMergeExchangePOperator hpme =
+ new HashPartitionMergeExchangePOperator(ocList, hpe.getHashFields(), hpe.getDomain());
op1.setPhysicalOperator(hpme);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
index 5bf4e6881..e197814 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
@@ -126,8 +126,8 @@
// since we are sure that the output of the UNIONALL operator is used
// afterwards.
if (opRef.getValue().getOperatorTag() == LogicalOperatorTag.UNIONALL) {
- Iterator<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> iter = ((UnionAllOperator) opRef
- .getValue()).getVariableMappings().iterator();
+ Iterator<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> iter =
+ ((UnionAllOperator) opRef.getValue()).getVariableMappings().iterator();
while (iter.hasNext()) {
Triple<LogicalVariable, LogicalVariable, LogicalVariable> varMapping = iter.next();
survivedUnionSourceVarSet.add(varMapping.first);
@@ -240,8 +240,8 @@
}
private boolean removeUnusedVarsFromUnionAll(UnionAllOperator unionOp, Set<LogicalVariable> toRemove) {
- Iterator<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> iter = unionOp.getVariableMappings()
- .iterator();
+ Iterator<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> iter =
+ unionOp.getVariableMappings().iterator();
boolean modified = false;
if (toRemove != null && !toRemove.isEmpty()) {
while (iter.hasNext()) {
@@ -361,8 +361,8 @@
} else {
// A decor var mapping can have a variable reference expression without a new variable
// definition, which is for rebinding the referred variable.
- VariableReferenceExpression varExpr = (VariableReferenceExpression) decorMapping.second
- .getValue();
+ VariableReferenceExpression varExpr =
+ (VariableReferenceExpression) decorMapping.second.getValue();
LogicalVariable reboundDecorVar = varExpr.getVariableReference();
assignVarsSetInThisOp.add(reboundDecorVar);
}
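The comment kept in the last hunk describes the two shapes a group-by decor mapping can take; the rewrapped cast handles the second one. Both branches condensed, with `decorMapping` and `assignVarsSetInThisOp` assumed from the surrounding method:

    // Hedged sketch of the two decor-mapping shapes.
    if (decorMapping.first != null) {
        // DECOR $new := <expr> -- defines a new variable.
        assignVarsSetInThisOp.add(decorMapping.first);
    } else {
        // DECOR $v -- no new definition; it rebinds the referred variable.
        VariableReferenceExpression varExpr =
                (VariableReferenceExpression) decorMapping.second.getValue();
        assignVarsSetInThisOp.add(varExpr.getVariableReference());
    }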
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
index 84961d6..5b6285a 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
@@ -172,8 +172,7 @@
boolean hasIntermediateAgg = generateMergeAggregationExpressions(gby, context);
if (hasIntermediateAgg) {
ExternalGroupByPOperator externalGby = new ExternalGroupByPOperator(
- gby.getGroupByList(),
- physicalOptimizationConfig.getMaxFramesForGroupBy(),
+ gby.getGroupByList(), physicalOptimizationConfig.getMaxFramesForGroupBy(),
(long) physicalOptimizationConfig.getMaxFramesForGroupBy()
* physicalOptimizationConfig.getFrameSize());
op.setPhysicalOperator(externalGby);
@@ -354,8 +353,9 @@
prevSecondaryKeys = new ArrayList<LogicalVariable>();
getKeys(opInsDel.getPrevSecondaryKeyExprs(), prevSecondaryKeys);
if (opInsDel.getPrevAdditionalFilteringExpression() != null) {
- prevAdditionalFilteringKey = ((VariableReferenceExpression) (opInsDel
- .getPrevAdditionalFilteringExpression()).getValue()).getVariableReference();
+ prevAdditionalFilteringKey =
+ ((VariableReferenceExpression) (opInsDel.getPrevAdditionalFilteringExpression())
+ .getValue()).getVariableReference();
}
}
op.setPhysicalOperator(new IndexInsertDeleteUpsertPOperator(primaryKeys, secondaryKeys,
@@ -441,8 +441,8 @@
"External group-by currently works only for one nested plan with one root containing"
+ "an aggregate and a nested-tuple-source.");
}
- IMergeAggregationExpressionFactory mergeAggregationExpressionFactory = context
- .getMergeAggregationExpressionFactory();
+ IMergeAggregationExpressionFactory mergeAggregationExpressionFactory =
+ context.getMergeAggregationExpressionFactory();
Mutable<ILogicalOperator> r0 = p0.getRoots().get(0);
AbstractLogicalOperator r0Logical = (AbstractLogicalOperator) r0.getValue();
if (r0Logical.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
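The ExternalGroupByPOperator hunk above is more than a line break: the memory budget is frames times frame size, and the cast to long must happen before the multiplication or the product is computed in 32-bit arithmetic. A self-contained illustration with made-up values:

    // Self-contained illustration of the widening in the hunk above;
    // the config values are made up for the example.
    public class GroupByBudget {
        public static void main(String[] args) {
            int maxFramesForGroupBy = 65536; // hypothetical config value
            int frameSize = 65536;           // hypothetical config value
            long wrong = maxFramesForGroupBy * frameSize;        // int overflow: 0
            long right = (long) maxFramesForGroupBy * frameSize; // 4294967296
            System.out.println(wrong + " vs " + right);
        }
    }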
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanRule.java
index 32c7e03..4cd15a0 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanRule.java
@@ -74,8 +74,8 @@
private boolean elimOneSubplanWithNoFreeVars(Mutable<ILogicalOperator> opRef) {
SubplanOperator subplan = (SubplanOperator) opRef.getValue();
- AbstractLogicalOperator rootOp = (AbstractLogicalOperator) subplan.getNestedPlans().get(0).getRoots().get(0)
- .getValue();
+ AbstractLogicalOperator rootOp =
+ (AbstractLogicalOperator) subplan.getNestedPlans().get(0).getRoots().get(0).getValue();
if (rootOp.getOperatorTag() == LogicalOperatorTag.EMPTYTUPLESOURCE
|| rootOp.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
opRef.setValue(subplan.getInputs().get(0).getValue());
@@ -115,8 +115,8 @@
if (topOp == null) {
topOp = r.getValue();
} else {
- InnerJoinOperator j = new InnerJoinOperator(
- new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
+ InnerJoinOperator j =
+ new InnerJoinOperator(new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
j.getInputs().add(new MutableObject<ILogicalOperator>(topOp));
j.getInputs().add(r);
ctx.setOutputTypeEnvironment(j, j.computeOutputTypeEnvironment(ctx));
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanWithInputCardinalityOneRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanWithInputCardinalityOneRule.java
index 607ea1f..e2576ba 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanWithInputCardinalityOneRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanWithInputCardinalityOneRule.java
@@ -122,8 +122,8 @@
// Replaces all NTS operators in the nested plan with the Subplan input operator or its deep copy.
ILogicalOperator topOperator = rootRefs.get(0).getValue();
- ReplaceNtsWithSubplanInputOperatorVisitor visitor = new ReplaceNtsWithSubplanInputOperatorVisitor(context,
- subplan);
+ ReplaceNtsWithSubplanInputOperatorVisitor visitor =
+ new ReplaceNtsWithSubplanInputOperatorVisitor(context, subplan);
ILogicalOperator newTopOperator = topOperator.accept(visitor, null);
currentOpRef.setValue(newTopOperator);
OperatorManipulationUtil.computeTypeEnvironmentBottomUp(newTopOperator, context);
@@ -168,7 +168,7 @@
*/
private void isCardinalityOne(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> freeVars,
Set<LogicalVariable> varsWithCardinalityOne, Set<LogicalVariable> varsLiveAtUnnestAndJoin)
- throws AlgebricksException {
+ throws AlgebricksException {
AbstractLogicalOperator operator = (AbstractLogicalOperator) opRef.getValue();
List<LogicalVariable> liveVars = new ArrayList<>();
VariableUtilities.getLiveVariables(operator, liveVars);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java
index 942f181..c4ea604 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java
@@ -236,8 +236,8 @@
}
if (testForNull == null) {
testForNull = context.newVar();
- AssignOperator tmpAsgn = new AssignOperator(testForNull,
- new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
+ AssignOperator tmpAsgn =
+ new AssignOperator(testForNull, new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
tmpAsgn.getInputs().add(new MutableObject<ILogicalOperator>(rightRef.getValue()));
rightRef.setValue(tmpAsgn);
context.computeAndSetTypeEnvironmentForOperator(tmpAsgn);
@@ -247,10 +247,10 @@
ILogicalExpression isNullTest = new ScalarFunctionCallExpression(finfoEq,
new MutableObject<ILogicalExpression>(new VariableReferenceExpression(testForNull)));
IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
- ScalarFunctionCallExpression nonNullTest = new ScalarFunctionCallExpression(finfoNot,
- new MutableObject<ILogicalExpression>(isNullTest));
- SelectOperator selectNonNull = new SelectOperator(new MutableObject<ILogicalExpression>(nonNullTest), false,
- null);
+ ScalarFunctionCallExpression nonNullTest =
+ new ScalarFunctionCallExpression(finfoNot, new MutableObject<ILogicalExpression>(isNullTest));
+ SelectOperator selectNonNull =
+ new SelectOperator(new MutableObject<ILogicalExpression>(nonNullTest), false, null);
GroupByOperator g = new GroupByOperator();
Mutable<ILogicalOperator> newSubplanRef = new MutableObject<ILogicalOperator>(subplan);
NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(g));
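The rewrapped statements above assemble a null test: a constant-TRUE assign marks tuples that survived the inner branch, and a select keeps only groups whose marker is non-null via not(is-null(marker)). Condensed, with `testForNull`, `finfoEq`, and `finfoNot` assumed resolved as in the hunk:

    // Hedged sketch of the marker-and-filter construction.
    AssignOperator marker =
            new AssignOperator(testForNull, new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
    ILogicalExpression isNullTest = new ScalarFunctionCallExpression(finfoEq,
            new MutableObject<ILogicalExpression>(new VariableReferenceExpression(testForNull)));
    ScalarFunctionCallExpression nonNullTest =
            new ScalarFunctionCallExpression(finfoNot, new MutableObject<ILogicalExpression>(isNullTest));
    SelectOperator keepMatched =
            new SelectOperator(new MutableObject<ILogicalExpression>(nonNullTest), false, null);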
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/MoveFreeVariableOperatorOutOfSubplanRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/MoveFreeVariableOperatorOutOfSubplanRule.java
index fa893d5..94cae74 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/MoveFreeVariableOperatorOutOfSubplanRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/MoveFreeVariableOperatorOutOfSubplanRule.java
@@ -125,7 +125,7 @@
LogicalOperatorTag operatorTag = currentOperator.getOperatorTag();
if (operatorTag == LogicalOperatorTag.AGGREGATE || operatorTag == LogicalOperatorTag.RUNNINGAGGREGATE
|| operatorTag == LogicalOperatorTag.GROUP) {
- return false;
+ return false;
}
if (operatorTag == LogicalOperatorTag.PROJECT) {
Set<LogicalVariable> producedVars = new HashSet<>();
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/NestedSubplanToJoinRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/NestedSubplanToJoinRule.java
index 35c7e4e..d9acf53 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/NestedSubplanToJoinRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/NestedSubplanToJoinRule.java
@@ -109,8 +109,8 @@
**/
Mutable<ILogicalExpression> expr = new MutableObject<ILogicalExpression>(ConstantExpression.TRUE);
Mutable<ILogicalOperator> nestedRootRef = nestedRoots.get(0);
- ILogicalOperator join = new InnerJoinOperator(expr, new MutableObject<ILogicalOperator>(subplanInput),
- nestedRootRef);
+ ILogicalOperator join =
+ new InnerJoinOperator(expr, new MutableObject<ILogicalOperator>(subplanInput), nestedRootRef);
/** rewrite the nested tuple source to be empty tuple source */
rewriteNestedTupleSource(nestedRootRef, context);
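As the reflowed construction shows, an uncorrelated nested plan becomes a plain product, and Algebricks expresses a product as an inner join whose condition is the constant TRUE. The core of it, with `subplanInput` and `nestedRootRef` assumed from the hunk:

    // Hedged sketch: join-with-TRUE is the product encoding used here.
    Mutable<ILogicalExpression> alwaysTrue =
            new MutableObject<ILogicalExpression>(ConstantExpression.TRUE);
    ILogicalOperator product =
            new InnerJoinOperator(alwaysTrue, new MutableObject<ILogicalOperator>(subplanInput), nestedRootRef);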
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/PushSubplanIntoGroupByRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/PushSubplanIntoGroupByRule.java
index af95ecd..9d3b311 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/PushSubplanIntoGroupByRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/PushSubplanIntoGroupByRule.java
@@ -154,8 +154,8 @@
// Sets the nts for an original subplan.
Mutable<ILogicalOperator> originalGbyRootOpRef = gbyNestedPlan.getRoots().get(rootIndex);
Mutable<ILogicalOperator> originalGbyNtsRef = downToNts(originalGbyRootOpRef);
- NestedTupleSourceOperator originalNts = (NestedTupleSourceOperator) originalGbyNtsRef
- .getValue();
+ NestedTupleSourceOperator originalNts =
+ (NestedTupleSourceOperator) originalGbyNtsRef.getValue();
originalNts.setDataSourceReference(new MutableObject<>(gby));
// Pushes a new subplan if possible.
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/ReplaceNtsWithSubplanInputOperatorVisitor.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/ReplaceNtsWithSubplanInputOperatorVisitor.java
index 080828d..35aa984 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/ReplaceNtsWithSubplanInputOperatorVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/ReplaceNtsWithSubplanInputOperatorVisitor.java
@@ -141,8 +141,8 @@
isOriginalCopyUsed = true;
return subplanInputOperator;
}
- LogicalOperatorDeepCopyWithNewVariablesVisitor visitor = new LogicalOperatorDeepCopyWithNewVariablesVisitor(ctx,
- ctx);
+ LogicalOperatorDeepCopyWithNewVariablesVisitor visitor =
+ new LogicalOperatorDeepCopyWithNewVariablesVisitor(ctx, ctx);
ILogicalOperator copiedSubplanInputOperator = visitor.deepCopy(subplanInputOperator);
varMap.putAll(visitor.getInputToOutputVariableMapping());
return copiedSubplanInputOperator;
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/SubplanOutOfGroupRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/SubplanOutOfGroupRule.java
index 049e853..b9b2cee 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/SubplanOutOfGroupRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/SubplanOutOfGroupRule.java
@@ -51,7 +51,8 @@
public class SubplanOutOfGroupRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/util/PhysicalOptimizationsUtil.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/util/PhysicalOptimizationsUtil.java
index 99480bf..15ae32a 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/util/PhysicalOptimizationsUtil.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/util/PhysicalOptimizationsUtil.java
@@ -41,15 +41,16 @@
visitOperatorAndItsDescendants(op, visitor, ctx);
}
- public static <R> void visitOperatorAndItsDescendants(ILogicalOperator op, ILogicalOperatorVisitor<R, IOptimizationContext> visitor,
- IOptimizationContext ctx) throws AlgebricksException {
+ public static <R> void visitOperatorAndItsDescendants(ILogicalOperator op,
+ ILogicalOperatorVisitor<R, IOptimizationContext> visitor, IOptimizationContext ctx)
+ throws AlgebricksException {
Set<ILogicalOperator> visitSet = new HashSet<ILogicalOperator>();
computeFDsAndEqClassesWithVisitorRec(op, ctx, visitor, visitSet);
}
private static <R> void computeFDsAndEqClassesWithVisitorRec(ILogicalOperator op, IOptimizationContext ctx,
ILogicalOperatorVisitor<R, IOptimizationContext> visitor, Set<ILogicalOperator> visitSet)
- throws AlgebricksException {
+ throws AlgebricksException {
visitSet.add(op);
for (Mutable<ILogicalOperator> i : op.getInputs()) {
computeFDsAndEqClassesWithVisitorRec((AbstractLogicalOperator) i.getValue(), ctx, visitor, visitSet);
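The re-indented helper is a depth-first descent over the operator DAG: mark the operator as visited, recurse into every input, then let the visitor run on the operator itself. A hypothetical standalone sketch of the same shape (visitor application elided):

    // Hypothetical sketch; `seen` guards against revisiting shared inputs.
    static void visitDepthFirst(ILogicalOperator op, Set<ILogicalOperator> seen) throws AlgebricksException {
        if (!seen.add(op)) {
            return; // already visited via another parent
        }
        for (Mutable<ILogicalOperator> input : op.getInputs()) {
            visitDepthFirst(input.getValue(), seen);
        }
        // ... op.accept(visitor, ...) would go here ...
    }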
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/aggreg/SerializableAggregatorDescriptorFactory.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/aggreg/SerializableAggregatorDescriptorFactory.java
index 1e06c76..28590ec 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/aggreg/SerializableAggregatorDescriptorFactory.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/aggreg/SerializableAggregatorDescriptorFactory.java
@@ -93,8 +93,8 @@
int fieldSlotLength = stateAccessor.getFieldSlotsLength();
for (int i = 0; i < aggs.length; i++) {
byte[] data = stateAccessor.getBuffer().array();
- int start = stateAccessor.getFieldStartOffset(stateTupleIndex, i + keys.length)
- + stateTupleStart + fieldSlotLength;
+ int start = stateAccessor.getFieldStartOffset(stateTupleIndex, i + keys.length) + stateTupleStart
+ + fieldSlotLength;
aggs[i].step(ftr, data, start, stateFieldLength[i]);
}
}
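The rewrapped sum locates an aggregate state field inside a frame: the field's offset relative to the tuple's data area, plus where the tuple starts in the frame, plus the field-slot header that precedes the data. A self-contained illustration with made-up values:

    // Self-contained illustration of the offset arithmetic; values are made up.
    public class FieldOffset {
        public static void main(String[] args) {
            int tupleStart = 128;      // where the tuple begins in the frame
            int fieldSlotsLength = 16; // e.g. 4 bytes per offset slot, 4 fields
            int fieldStartOffset = 24; // field's offset within the data area
            int absoluteStart = fieldStartOffset + tupleStart + fieldSlotsLength;
            System.out.println("field data begins at byte " + absoluteStart); // 168
        }
    }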
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/SubplanRuntimeFactory.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/SubplanRuntimeFactory.java
index f6a349f..7e04750 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/SubplanRuntimeFactory.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/SubplanRuntimeFactory.java
@@ -73,8 +73,8 @@
RecordDescriptor pipelineOutputRecordDescriptor = null;
- final PipelineAssembler pa = new PipelineAssembler(pipeline, 1, 1, inputRecordDesc,
- pipelineOutputRecordDescriptor);
+ final PipelineAssembler pa =
+ new PipelineAssembler(pipeline, 1, 1, inputRecordDesc, pipelineOutputRecordDescriptor);
final IMissingWriter[] nullWriters = new IMissingWriter[missingWriterFactories.length];
for (int i = 0; i < missingWriterFactories.length; i++) {
nullWriters[i] = missingWriterFactories[i].createMissingWriter();
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/sort/InMemorySortRuntimeFactory.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/sort/InMemorySortRuntimeFactory.java
index bb8223d..f251bb7 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/sort/InMemorySortRuntimeFactory.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/sort/InMemorySortRuntimeFactory.java
@@ -44,10 +44,9 @@
public InMemorySortRuntimeFactory(int[] sortFields, INormalizedKeyComputerFactory firstKeyNormalizerFactory,
IBinaryComparatorFactory[] comparatorFactories, int[] projectionList) {
- this(sortFields,
- firstKeyNormalizerFactory != null ? new INormalizedKeyComputerFactory[] { firstKeyNormalizerFactory }
- : null,
- comparatorFactories, projectionList);
+ this(sortFields, firstKeyNormalizerFactory != null
+ ? new INormalizedKeyComputerFactory[] { firstKeyNormalizerFactory } : null, comparatorFactories,
+ projectionList);
}
public InMemorySortRuntimeFactory(int[] sortFields, INormalizedKeyComputerFactory[] keyNormalizerFactories,
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/serializer/ResultSerializerFactoryProvider.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/serializer/ResultSerializerFactoryProvider.java
index 0e49d22..763e6ff 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/serializer/ResultSerializerFactoryProvider.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/serializer/ResultSerializerFactoryProvider.java
@@ -47,8 +47,8 @@
@Override
public IResultSerializer createResultSerializer(RecordDescriptor inputRecordDesc, PrintStream printStream) {
- final IAWriter writer = writerFactory.createWriter(fields, printStream, printerFactories,
- inputRecordDesc);
+ final IAWriter writer =
+ writerFactory.createWriter(fields, printStream, printerFactories, inputRecordDesc);
return new IResultSerializer() {
private static final long serialVersionUID = 1L;
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/src/main/java/org/apache/hyracks/algebricks/tests/pushruntime/IntegerConstantEvalFactory.java b/hyracks-fullstack/algebricks/algebricks-tests/src/main/java/org/apache/hyracks/algebricks/tests/pushruntime/IntegerConstantEvalFactory.java
index 26790c5..afbff40 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/src/main/java/org/apache/hyracks/algebricks/tests/pushruntime/IntegerConstantEvalFactory.java
+++ b/hyracks-fullstack/algebricks/algebricks-tests/src/main/java/org/apache/hyracks/algebricks/tests/pushruntime/IntegerConstantEvalFactory.java
@@ -48,7 +48,7 @@
private ArrayBackedValueStorage buf = new ArrayBackedValueStorage();
{
- IntegerSerializerDeserializer.INSTANCE.serialize(value, buf.getDataOutput());
+ IntegerSerializerDeserializer.INSTANCE.serialize(value, buf.getDataOutput());
}
@Override
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
index cc4c1b9..40e2ec6 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
+++ b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
@@ -141,9 +141,9 @@
PrinterRuntimeFactory printer = new PrinterRuntimeFactory(new int[] { 0, 1 },
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE, IntegerPrinterFactory.INSTANCE }, assignDesc);
- AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
- new IPushRuntimeFactory[] { ets, assign, printer },
- new RecordDescriptor[] { etsDesc, assignDesc, null });
+ AlgebricksMetaOperatorDescriptor algebricksOp =
+ new AlgebricksMetaOperatorDescriptor(spec, 0, 0, new IPushRuntimeFactory[] { ets, assign, printer },
+ new RecordDescriptor[] { etsDesc, assignDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp, DEFAULT_NODES);
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
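Every test reformatted in this file follows the same recipe: chain IPushRuntimeFactory stages into one AlgebricksMetaOperatorDescriptor, pin it to a node, add it as a job root, and run. Condensed below; the spec and the `ets`/`assign`/`printer` stages are assumed built earlier in the test, as in the hunk above:

    // Hedged sketch of the recurring recipe in this test class.
    AlgebricksMetaOperatorDescriptor algebricksOp =
            new AlgebricksMetaOperatorDescriptor(spec, 0, 0, new IPushRuntimeFactory[] { ets, assign, printer },
                    new RecordDescriptor[] { etsDesc, assignDesc, null });
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp, DEFAULT_NODES);
    spec.addRoot(algebricksOp);
    AlgebricksHyracksIntegrationUtil.runJob(spec);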
@@ -168,9 +168,9 @@
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE, IntegerPrinterFactory.INSTANCE }, outFile,
PrinterBasedWriterFactory.INSTANCE, assignDesc);
- AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
- new IPushRuntimeFactory[] { ets, assign, writer },
- new RecordDescriptor[] { etsDesc, assignDesc, null });
+ AlgebricksMetaOperatorDescriptor algebricksOp =
+ new AlgebricksMetaOperatorDescriptor(spec, 0, 0, new IPushRuntimeFactory[] { ets, assign, writer },
+ new RecordDescriptor[] { etsDesc, assignDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp, DEFAULT_NODES);
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
@@ -187,28 +187,28 @@
// the scanner
FileSplit[] intFileSplits = new FileSplit[1];
- intFileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
- + "simple" + File.separator + "int-part1.tbl");
+ intFileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
+ "data" + File.separator + "simple" + File.separator + "int-part1.tbl");
IFileSplitProvider intSplitProvider = new ConstantFileSplitProvider(intFileSplits);
- RecordDescriptor intScannerDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor intScannerDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE };
FileScanOperatorDescriptor intScanner = new FileScanOperatorDescriptor(spec, intSplitProvider,
new DelimitedDataTupleParserFactory(valueParsers, '|'), intScannerDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, intScanner, DEFAULT_NODES);
// the algebricks op.
- IScalarEvaluatorFactory cond = new IntegerGreaterThanEvalFactory(new IntegerConstantEvalFactory(2),
- new TupleFieldEvaluatorFactory(0));
+ IScalarEvaluatorFactory cond =
+ new IntegerGreaterThanEvalFactory(new IntegerConstantEvalFactory(2), new TupleFieldEvaluatorFactory(0));
StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(cond, new int[] { 0 },
BinaryBooleanInspectorImpl.FACTORY, false, -1, null);
RecordDescriptor selectDesc = intScannerDesc;
String filePath = PATH_ACTUAL + SEPARATOR + "scanSelectWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- selectDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, selectDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { select, writer }, new RecordDescriptor[] { selectDesc, null });
@@ -240,14 +240,14 @@
RecordDescriptor assignDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
StreamProjectRuntimeFactory project = new StreamProjectRuntimeFactory(new int[] { 1 });
- RecordDescriptor projectDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor projectDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "etsAssignProjectWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- projectDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, projectDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
new IPushRuntimeFactory[] { ets, assign, project, writer },
@@ -270,8 +270,8 @@
// the scanner
FileSplit[] fileSplits = new FileSplit[1];
- fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl");
+ fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl");
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
@@ -290,14 +290,14 @@
// the algebricks op.
StreamLimitRuntimeFactory limit = new StreamLimitRuntimeFactory(new IntegerConstantEvalFactory(2), null,
new int[] { 0 }, BinaryIntegerInspectorImpl.FACTORY);
- RecordDescriptor limitDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor limitDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "scanLimitWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- limitDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, limitDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { limit, writer }, new RecordDescriptor[] { limitDesc, null });
@@ -323,18 +323,18 @@
RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
IUnnestingEvaluatorFactory aggregFactory = new IntArrayUnnester(new int[] { 100, 200, 300 });
UnnestRuntimeFactory unnest = new UnnestRuntimeFactory(0, aggregFactory, new int[] { 0 }, false, null);
- RecordDescriptor unnestDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor unnestDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "etsUnnestWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- unnestDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, unnestDesc);
- AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
- new IPushRuntimeFactory[] { ets, unnest, writer },
- new RecordDescriptor[] { etsDesc, unnestDesc, null });
+ AlgebricksMetaOperatorDescriptor algebricksOp =
+ new AlgebricksMetaOperatorDescriptor(spec, 0, 0, new IPushRuntimeFactory[] { ets, unnest, writer },
+ new RecordDescriptor[] { etsDesc, unnestDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
spec.addRoot(algebricksOp);
@@ -373,14 +373,14 @@
// the algebricks op.
AggregateRuntimeFactory agg = new AggregateRuntimeFactory(
new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() });
- RecordDescriptor aggDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor aggDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "scanAggregateWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- aggDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, aggDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { agg, writer }, new RecordDescriptor[] { aggDesc, null });
@@ -404,8 +404,8 @@
// the scanner
FileSplit[] fileSplits = new FileSplit[1];
- fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl");
+ fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl");
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
@@ -436,8 +436,8 @@
RecordDescriptor ntsDesc = sortDesc;
AggregateRuntimeFactory agg = new AggregateRuntimeFactory(
new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() });
- RecordDescriptor aggDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor aggDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
AlgebricksPipeline pipeline = new AlgebricksPipeline(new IPushRuntimeFactory[] { nts, agg },
new RecordDescriptor[] { ntsDesc, aggDesc });
NestedPlansAccumulatingAggregatorFactory npaaf = new NestedPlansAccumulatingAggregatorFactory(
@@ -452,18 +452,18 @@
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
// the algebricks op.
- IScalarEvaluatorFactory cond = new IntegerEqualsEvalFactory(new IntegerConstantEvalFactory(3),
- new TupleFieldEvaluatorFactory(0)); // Canadian customers
+ IScalarEvaluatorFactory cond =
+ new IntegerEqualsEvalFactory(new IntegerConstantEvalFactory(3), new TupleFieldEvaluatorFactory(0)); // Canadian customers
StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(cond, new int[] { 1 },
BinaryBooleanInspectorImpl.FACTORY, false, -1, null);
- RecordDescriptor selectDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor selectDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "scanSortGbySelectWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- selectDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, selectDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { select, writer }, new RecordDescriptor[] { selectDesc, null });
@@ -491,8 +491,8 @@
RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
IUnnestingEvaluatorFactory aggregFactory = new IntArrayUnnester(new int[] { 100, 200, 300 });
UnnestRuntimeFactory unnest = new UnnestRuntimeFactory(0, aggregFactory, new int[] { 0 }, false, null);
- RecordDescriptor unnestDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor unnestDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
RunningAggregateRuntimeFactory ragg = new RunningAggregateRuntimeFactory(new int[] { 1 },
new IRunningAggregateEvaluatorFactory[] { new TupleCountRunningAggregateFunctionFactory() },
@@ -502,9 +502,9 @@
String filePath = PATH_ACTUAL + SEPARATOR + "etsUnnestRunningaggregateWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 1 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- raggDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 1 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, raggDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
new IPushRuntimeFactory[] { ets, unnest, ragg, writer },
@@ -584,13 +584,13 @@
String inputFileName = "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl";
- FileSplit[] inputSplits = new FileSplit[] {
- new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFileName) };
+ FileSplit[] inputSplits =
+ new FileSplit[] { new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFileName) };
DelimitedDataTupleParserFactory stringParser = new DelimitedDataTupleParserFactory(
new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, '\u0000');
- RecordDescriptor stringRec = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(), });
+ RecordDescriptor stringRec =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(), });
FileScanOperatorDescriptor scanOp = new FileScanOperatorDescriptor(spec,
new ConstantFileSplitProvider(inputSplits), stringParser, stringRec);
@@ -624,8 +624,8 @@
AlgebricksHyracksIntegrationUtil.runJob(spec);
for (int i = 0; i < outputArity; i++) {
- compareFiles("data" + File.separator + "device0" + File.separator + inputFileName, outputFile[i]
- .getAbsolutePath());
+ compareFiles("data" + File.separator + "device0" + File.separator + inputFileName,
+ outputFile[i].getAbsolutePath());
}
}
@@ -643,8 +643,8 @@
JobSpecification spec = new JobSpecification(FRAME_SIZE);
- String inputFileName[] = { "data" + File.separator + "simple" + File.separator + "int-string-part1.tbl", "data"
- + File.separator + "simple" + File.separator + "int-string-part1-split-0.tbl",
+ String inputFileName[] = { "data" + File.separator + "simple" + File.separator + "int-string-part1.tbl",
+ "data" + File.separator + "simple" + File.separator + "int-string-part1-split-0.tbl",
"data" + File.separator + "simple" + File.separator + "int-string-part1-split-1.tbl" };
File[] inputFiles = new File[inputFileName.length];
for (int i = 0; i < inputFileName.length; i++) {
@@ -657,16 +657,15 @@
outputFile[i] = outputFileSplit[i].getFile(AlgebricksHyracksIntegrationUtil.nc1.getIoManager());
}
- FileSplit[] inputSplits = new FileSplit[] {
- new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFileName[0]) };
+ FileSplit[] inputSplits =
+ new FileSplit[] { new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFileName[0]) };
IFileSplitProvider intSplitProvider = new ConstantFileSplitProvider(inputSplits);
- RecordDescriptor scannerDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() });
- IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE };
+ IValueParserFactory[] valueParsers =
+ new IValueParserFactory[] { IntegerParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE };
FileScanOperatorDescriptor intScanner = new FileScanOperatorDescriptor(spec, intSplitProvider,
new DelimitedDataTupleParserFactory(valueParsers, '|'), scannerDesc);
@@ -697,8 +696,8 @@
AlgebricksHyracksIntegrationUtil.runJob(spec);
for (int i = 0; i < outputArity; i++) {
- compareFiles("data" + File.separator + "device0" + File.separator + inputFileName[i + 1], outputFile[i]
- .getAbsolutePath());
+ compareFiles("data" + File.separator + "device0" + File.separator + inputFileName[i + 1],
+ outputFile[i].getAbsolutePath());
}
}
@@ -708,8 +707,8 @@
// the scanner
FileSplit[] fileSplits = new FileSplit[1];
- fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "nation.tbl");
+ fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "nation.tbl");
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
@@ -761,22 +760,24 @@
EmptyTupleSourceRuntimeFactory ets = new EmptyTupleSourceRuntimeFactory();
RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
- AssignRuntimeFactory assign1 = new AssignRuntimeFactory(new int[] { 0 },
- new IScalarEvaluatorFactory[] { const1 }, new int[] { 0 });
- RecordDescriptor assign1Desc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ AssignRuntimeFactory assign1 =
+ new AssignRuntimeFactory(new int[] { 0 }, new IScalarEvaluatorFactory[] { const1 }, new int[] { 0 });
+ RecordDescriptor assign1Desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
NestedTupleSourceRuntimeFactory nts = new NestedTupleSourceRuntimeFactory();
- AssignRuntimeFactory assign2 = new AssignRuntimeFactory(new int[] { 1 },
- new IScalarEvaluatorFactory[] { new IntegerAddEvalFactory(new TupleFieldEvaluatorFactory(0), const2) },
- new int[] { 0, 1 });
+ AssignRuntimeFactory assign2 =
+ new AssignRuntimeFactory(new int[] { 1 },
+ new IScalarEvaluatorFactory[] {
+ new IntegerAddEvalFactory(new TupleFieldEvaluatorFactory(0), const2) },
+ new int[] { 0, 1 });
RecordDescriptor assign2Desc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
StreamProjectRuntimeFactory project1 = new StreamProjectRuntimeFactory(new int[] { 1 });
- RecordDescriptor project1Desc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor project1Desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
AlgebricksPipeline pipeline = new AlgebricksPipeline(new IPushRuntimeFactory[] { nts, assign2, project1 },
new RecordDescriptor[] { assign1Desc, assign2Desc, project1Desc });
@@ -788,14 +789,14 @@
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
StreamProjectRuntimeFactory project2 = new StreamProjectRuntimeFactory(new int[] { 1 });
- RecordDescriptor project2Desc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor project2Desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "etsAssignSubplanProjectWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- project2Desc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, project2Desc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
new IPushRuntimeFactory[] { ets, assign1, subplan, project2, writer },
@@ -847,31 +848,33 @@
RecordDescriptor ntsDesc = sortDesc;
AggregateRuntimeFactory agg = new AggregateRuntimeFactory(
new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() });
- RecordDescriptor aggDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor aggDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
AlgebricksPipeline pipeline = new AlgebricksPipeline(new IPushRuntimeFactory[] { nts, agg },
new RecordDescriptor[] { ntsDesc, aggDesc });
NestedPlansAccumulatingAggregatorFactory npaaf = new NestedPlansAccumulatingAggregatorFactory(
new AlgebricksPipeline[] { pipeline }, new int[] { 3 }, new int[] {});
RecordDescriptor gbyDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
- MicroPreClusteredGroupRuntimeFactory gby = new MicroPreClusteredGroupRuntimeFactory(new int[] { 3 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, npaaf,
- sortDesc, gbyDesc, null);
+ MicroPreClusteredGroupRuntimeFactory gby =
+ new MicroPreClusteredGroupRuntimeFactory(new int[] { 3 },
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
+ npaaf, sortDesc, gbyDesc, null);
// the algebricks op.
- IScalarEvaluatorFactory cond = new IntegerEqualsEvalFactory(new IntegerConstantEvalFactory(3),
- new TupleFieldEvaluatorFactory(0)); // Canadian customers
+ IScalarEvaluatorFactory cond =
+ new IntegerEqualsEvalFactory(new IntegerConstantEvalFactory(3), new TupleFieldEvaluatorFactory(0)); // Canadian customers
StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(cond, new int[] { 1 },
BinaryBooleanInspectorImpl.FACTORY, false, -1, null);
- RecordDescriptor selectDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor selectDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "scanSortGbySelectWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- selectDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, selectDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { sort, gby, select, writer },
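Nearly every hunk in this test file applies the same template rule: when a declaration with an initializer overflows the line limit, the break goes immediately after the '=' and the whole constructor call moves to the continuation line, instead of breaking inside the argument list. A minimal sketch of the two shapes, using a placeholder type rather than the Hyracks classes above:

    class WrapStyleDemo {
        static class Descriptor {
            Descriptor(String first, String second) {
            }
        }

        void oldStyle() {
            // Before: the argument list is split across lines.
            Descriptor desc = new Descriptor("a-fairly-long-argument-value-one",
                    "a-fairly-long-argument-value-two");
        }

        void newStyle() {
            // After: break after '=', keep the call together on the continuation line.
            Descriptor desc =
                    new Descriptor("a-fairly-long-argument-value-one", "a-fairly-long-argument-value-two");
        }
    }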
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/tools/WriteValueTest.java b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/tools/WriteValueTest.java
index 6770494..82be109 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/tools/WriteValueTest.java
+++ b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/tools/WriteValueTest.java
@@ -66,8 +66,8 @@
WriteValueTools.writeInt(i, baaos);
byte[] goal = Integer.toString(i).getBytes();
if (baaos.size() != goal.length) {
- throw new Exception("Expecting to write " + i + " in " + goal.length + " bytes, but found " + baaos.size()
- + " bytes.");
+ throw new Exception(
+ "Expecting to write " + i + " in " + goal.length + " bytes, but found " + baaos.size() + " bytes.");
}
for (int k = 0; k < goal.length; k++) {
if (goal[k] != baaos.getByteArray()[k]) {
@@ -82,8 +82,8 @@
WriteValueTools.writeLong(x, baaos);
byte[] goal = Long.toString(x).getBytes();
if (baaos.size() != goal.length) {
- throw new Exception("Expecting to write " + x + " in " + goal.length + " bytes, but found " + baaos.size()
- + " bytes.");
+ throw new Exception(
+ "Expecting to write " + x + " in " + goal.length + " bytes, but found " + baaos.size() + " bytes.");
}
for (int k = 0; k < goal.length; k++) {
if (goal[k] != baaos.getByteArray()[k]) {
@@ -100,8 +100,8 @@
WriteValueTools.writeUTF8StringWithQuotes(str, baaos);
byte[] b = str.getBytes("UTF-8");
if (baaos.size() != b.length + 2) {
- throw new Exception("Expecting to write " + b + " in " + b.length + " bytes, but found " + baaos.size()
- + " bytes.");
+ throw new Exception(
+ "Expecting to write " + b + " in " + b.length + " bytes, but found " + baaos.size() + " bytes.");
}
if (baaos.getByteArray()[0] != '\"' || baaos.getByteArray()[baaos.size() - 1] != '\"') {
throw new Exception("Missing quotes.");
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
index 2971b72..1df9824 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
+++ b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
@@ -65,8 +65,7 @@
ncConfig1.setClusterListenAddress("127.0.0.1");
ncConfig1.setDataListenAddress("127.0.0.1");
ncConfig1.setResultListenAddress("127.0.0.1");
- ncConfig1.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data",
- "device0") });
+ ncConfig1.setIODevices(new String[] { joinPath(System.getProperty("user.dir"), "target", "data", "device0") });
FileUtils.forceMkdir(new File(ncConfig1.getIODevices()[0]));
nc1 = new NodeControllerService(ncConfig1);
nc1.start();
@@ -77,8 +76,7 @@
ncConfig2.setClusterListenAddress("127.0.0.1");
ncConfig2.setDataListenAddress("127.0.0.1");
ncConfig2.setResultListenAddress("127.0.0.1");
- ncConfig2.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data",
- "device1") });
+ ncConfig2.setIODevices(new String[] { joinPath(System.getProperty("user.dir"), "target", "data", "device1") });
FileUtils.forceMkdir(new File(ncConfig1.getIODevices()[0]));
nc2 = new NodeControllerService(ncConfig2);
nc2.start();
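The two node-controller blocks above make the same pair of fixes: the array creation loses the stray space (new String [] becomes new String[]) and the initializer now fits on one line. A compilable sketch of the resulting form, with a stand-in joinPath helper rather than the utility the test harness imports:

    import java.io.File;

    class IoDeviceConfigDemo {
        // Stand-in for the joinPath utility used in the hunks above.
        static String joinPath(String... parts) {
            return String.join(File.separator, parts);
        }

        public static void main(String[] args) {
            String[] ioDevices =
                    new String[] { joinPath(System.getProperty("user.dir"), "target", "data", "device0") };
            System.out.println(ioDevices[0]);
        }
    }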
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
index cd6362f..e2cd923 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
@@ -118,10 +118,10 @@
for (int i = 0; i < nActivityOutputs; ++i) {
IConnectorDescriptor conn = aOutputs.get(i);
ac.addConnector(conn);
- Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> pcPair = jag.getConnectorActivityMap()
- .get(conn.getConnectorId());
- ac.connect(conn, activity, i, pcPair.getRight().getLeft(), pcPair.getRight().getRight(), jag
- .getConnectorRecordDescriptorMap().get(conn.getConnectorId()));
+ Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> pcPair =
+ jag.getConnectorActivityMap().get(conn.getConnectorId());
+ ac.connect(conn, activity, i, pcPair.getRight().getLeft(), pcPair.getRight().getRight(),
+ jag.getConnectorRecordDescriptorMap().get(conn.getConnectorId()));
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
index 64bcf6e..b4d5ba4 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
@@ -101,8 +101,8 @@
}
public void finish() {
- Map<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> caMap = jag
- .getConnectorActivityMap();
+ Map<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> caMap =
+ jag.getConnectorActivityMap();
connectorProducerMap
.forEach((cdId, producer) -> caMap.put(cdId, Pair.of(producer, connectorConsumerMap.get(cdId))));
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameConstants.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameConstants.java
index cd74659..04c27be 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameConstants.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameConstants.java
@@ -43,7 +43,7 @@
/**
* Indicate the total size of the meta data.
*/
- int META_DATA_LEN = SIZE_LEN + TUPLE_START_OFFSET;
+ int META_DATA_LEN = SIZE_LEN + TUPLE_START_OFFSET;
boolean DEBUG_FRAME_IO = false;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameHelper.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameHelper.java
index 68533c6..1242ba0 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameHelper.java
@@ -38,7 +38,8 @@
}
public static void serializeFrameSize(ByteBuffer outputFrame, int start, int numberOfMinFrame) {
- IntSerDeUtils.putInt(outputFrame.array(), start + FrameConstants.META_DATA_FRAME_COUNT_OFFSET, numberOfMinFrame);
+ IntSerDeUtils.putInt(outputFrame.array(), start + FrameConstants.META_DATA_FRAME_COUNT_OFFSET,
+ numberOfMinFrame);
}
public static int deserializeNumOfMinFrame(ByteBuffer frame) {
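serializeFrameSize above stores the number of minimum-size frames that a (possibly expanded) frame spans into the frame's metadata, and deserializeNumOfMinFrame reads it back; the dataset reader later in this change uses exactly that value to call ensureFrameSize. A self-contained round-trip sketch of the idea; the offset constant here is hypothetical, the real one is FrameConstants.META_DATA_FRAME_COUNT_OFFSET:

    import java.nio.ByteBuffer;

    class FrameCountDemo {
        // Hypothetical offset chosen for the demo, not the FrameConstants value.
        static final int FRAME_COUNT_OFFSET = 0;

        public static void main(String[] args) {
            byte[] frame = new byte[256];
            // Record that this frame spans 3 minimum-size frames.
            ByteBuffer.wrap(frame).putInt(FRAME_COUNT_OFFSET, 3);
            int nMinFrames = ByteBuffer.wrap(frame).getInt(FRAME_COUNT_OFFSET);
            System.out.println(nMinFrames); // prints 3
        }
    }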
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/IFrameTupleAppender.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/IFrameTupleAppender.java
index 64fa322..9a2db6f 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/IFrameTupleAppender.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/IFrameTupleAppender.java
@@ -33,11 +33,11 @@
boolean append(IFrameTupleAccessor tupleAccessor, int tStartOffset, int tEndOffset) throws HyracksDataException;
- boolean appendConcat(IFrameTupleAccessor accessor0, int tIndex0, IFrameTupleAccessor accessor1,
- int tIndex1) throws HyracksDataException;
+ boolean appendConcat(IFrameTupleAccessor accessor0, int tIndex0, IFrameTupleAccessor accessor1, int tIndex1)
+ throws HyracksDataException;
- boolean appendConcat(IFrameTupleAccessor accessor0, int tIndex0, int[] fieldSlots1, byte[] bytes1,
- int offset1, int dataLen1) throws HyracksDataException;
+ boolean appendConcat(IFrameTupleAccessor accessor0, int tIndex0, int[] fieldSlots1, byte[] bytes1, int offset1,
+ int dataLen1) throws HyracksDataException;
boolean appendProjection(IFrameTupleAccessor accessor, int tIndex, int[] fields) throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
index d42cbb3..402f02e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
@@ -60,38 +60,38 @@
}
default long getLong(IOption option) {
- return (long)get(option);
+ return (long) get(option);
}
default int getInt(IOption option) {
- return (int)get(option);
+ return (int) get(option);
}
default short getShort(IOption option) {
- return (short)get(option);
+ return (short) get(option);
}
default String getString(IOption option) {
- return (String)get(option);
+ return (String) get(option);
}
default boolean getBoolean(IOption option) {
- return (boolean)get(option);
+ return (boolean) get(option);
}
default Level getLoggingLevel(IOption option) {
- return (Level)get(option);
+ return (Level) get(option);
}
default double getDouble(IOption option) {
- return (double)get(option);
+ return (double) get(option);
}
- default String [] getStringArray(IOption option) {
- return (String [])get(option);
+ default String[] getStringArray(IOption option) {
+ return (String[]) get(option);
}
default URL getURL(IOption option) {
- return (URL)get(option);
+ return (URL) get(option);
}
}
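Every hunk in this interface is the same one-character fix: the template requires a space after a cast. The underlying pattern, a single untyped get(IOption) accessor plus thin typed default getters that cast the result, can be sketched in isolation (Option and Config are stand-ins, not the Hyracks interfaces):

    interface Option {
        String name();
    }

    interface Config {
        Object get(Option option);

        // Each typed getter just narrows the one untyped value.
        default long getLong(Option option) {
            return (long) get(option);
        }

        default String[] getStringArray(Option option) {
            return (String[]) get(option);
        }
    }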
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOption.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOption.java
index ed6dcd0..5f11214 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOption.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOption.java
@@ -56,7 +56,9 @@
/**
* @return a true value indicates this option should not be advertised (e.g. command-line usage, documentation)
*/
- default boolean hidden() { return false; }
+ default boolean hidden() {
+ return false;
+ }
default String cmdline() {
return "-" + name().toLowerCase().replace("_", "-");
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java
index 95f5e1a..762fdee 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java
@@ -26,8 +26,8 @@
public class PartitionConstraintHelper {
public static void addPartitionCountConstraint(JobSpecification spec, IOperatorDescriptor op, int count) {
- spec.addUserConstraint(new Constraint(new PartitionCountExpression(op.getOperatorId()), new ConstantExpression(
- count)));
+ spec.addUserConstraint(
+ new Constraint(new PartitionCountExpression(op.getOperatorId()), new ConstantExpression(count)));
}
public static void addAbsoluteLocationConstraint(JobSpecification spec, IOperatorDescriptor op,
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/context/IHyracksCommonContext.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/context/IHyracksCommonContext.java
index 5afcf69..1206d8e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/context/IHyracksCommonContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/context/IHyracksCommonContext.java
@@ -20,7 +20,7 @@
import org.apache.hyracks.api.io.IIOManager;
-public interface IHyracksCommonContext extends IHyracksFrameMgrContext{
+public interface IHyracksCommonContext extends IHyracksFrameMgrContext {
public IIOManager getIoManager();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/JSONSerializable.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/JSONSerializable.java
index 23ae97b..8b514f0 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/JSONSerializable.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/JSONSerializable.java
@@ -18,7 +18,6 @@
*/
package org.apache.hyracks.api.dataflow.value;
-
import com.fasterxml.jackson.databind.node.ObjectNode;
public interface JSONSerializable {
@@ -27,5 +26,5 @@
*
* @return A om.createObjectNode instance representing this Java object.
*/
- public ObjectNode toJSON() ;
+ public ObjectNode toJSON();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IODeviceHandle.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IODeviceHandle.java
index 34c58f8..a2d28e1 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IODeviceHandle.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IODeviceHandle.java
@@ -48,8 +48,7 @@
public IODeviceHandle(File mount, String workspace) {
this.mount = mount;
this.workspace = workspace == null ? null
- : workspace.endsWith(File.separator) ? workspace.substring(0, workspace.length() - 1)
- : workspace;
+ : workspace.endsWith(File.separator) ? workspace.substring(0, workspace.length() - 1) : workspace;
}
public File getMount() {
@@ -78,7 +77,7 @@
* comma separated list of devices
* @return
*/
- public static List<IODeviceHandle> getDevices(String [] ioDevices) {
+ public static List<IODeviceHandle> getDevices(String[] ioDevices) {
List<IODeviceHandle> devices = new ArrayList<>();
for (String ioDevice : ioDevices) {
String devPath = ioDevice.trim();
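The constructor hunk above packs the workspace normalization into a nested ternary: null stays null, a trailing File.separator is trimmed, and anything else passes through unchanged. The same logic unrolled, purely as a readability aid:

    import java.io.File;

    class WorkspaceNormalizeDemo {
        static String normalize(String workspace) {
            if (workspace == null) {
                return null;
            }
            if (workspace.endsWith(File.separator)) {
                // Drop the single trailing separator.
                return workspace.substring(0, workspace.length() - 1);
            }
            return workspace;
        }

        public static void main(String[] args) {
            System.out.println(normalize("workspace" + File.separator)); // prints workspace
        }
    }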
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityCluster.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityCluster.java
index e5fad32..94e9c74 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityCluster.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityCluster.java
@@ -74,7 +74,8 @@
connectorRecordDescriptorMap = new HashMap<ConnectorDescriptorId, RecordDescriptor>();
activityInputMap = new HashMap<ActivityId, List<IConnectorDescriptor>>();
activityOutputMap = new HashMap<ActivityId, List<IConnectorDescriptor>>();
- connectorActivityMap = new HashMap<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
+ connectorActivityMap =
+ new HashMap<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
blocked2blockerMap = new HashMap<ActivityId, Set<ActivityId>>();
dependencies = new ArrayList<ActivityCluster>();
}
@@ -108,8 +109,7 @@
}
insertIntoIndexedMap(activityInputMap, consumerActivity.getActivityId(), consumerPort, connector);
insertIntoIndexedMap(activityOutputMap, producerActivity.getActivityId(), producerPort, connector);
- connectorActivityMap.put(
- connector.getConnectorId(),
+ connectorActivityMap.put(connector.getConnectorId(),
Pair.<Pair<IActivity, Integer>, Pair<IActivity, Integer>> of(
Pair.<IActivity, Integer> of(producerActivity, producerPort),
Pair.<IActivity, Integer> of(consumerActivity, consumerPort)));
@@ -187,7 +187,7 @@
vList.set(index, value);
}
- public JsonNode toJSON() {
+ public JsonNode toJSON() {
ObjectMapper om = new ObjectMapper();
ArrayNode jans = om.createArrayNode();
ObjectNode jac = om.createObjectNode();
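The connectorActivityMap wrapped above keys each connector onto a pair of (activity, port) pairs, producer on the left and consumer on the right, which is why ActivityClusterGraphBuilder earlier reads pcPair.getRight().getLeft() to find the consumer activity. A reduced sketch of that shape using org.apache.commons.lang3.tuple.Pair, with string stand-ins for the activity and connector ids:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.commons.lang3.tuple.Pair;

    class ConnectorMapDemo {
        public static void main(String[] args) {
            // connector id -> ((producer, producerPort), (consumer, consumerPort))
            Map<String, Pair<Pair<String, Integer>, Pair<String, Integer>>> connectorActivityMap =
                    new HashMap<>();
            connectorActivityMap.put("conn-0", Pair.of(Pair.of("scan", 0), Pair.of("select", 0)));
            // The consumer endpoint lives on the right, as in the builder code.
            System.out.println(connectorActivityMap.get("conn-0").getRight().getLeft()); // select
        }
    }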
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityClusterGraph.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityClusterGraph.java
index b64e2d5..5816c8f 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityClusterGraph.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityClusterGraph.java
@@ -161,7 +161,7 @@
return ac.getProducerActivity(cid);
}
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode acgj = om.createObjectNode();
ArrayNode acl = om.createArrayNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/IActivityClusterGraphGeneratorFactory.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/IActivityClusterGraphGeneratorFactory.java
index d23b944..bfa126a 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/IActivityClusterGraphGeneratorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/IActivityClusterGraphGeneratorFactory.java
@@ -25,8 +25,8 @@
import org.apache.hyracks.api.exceptions.HyracksException;
public interface IActivityClusterGraphGeneratorFactory extends Serializable {
- public IActivityClusterGraphGenerator createActivityClusterGraphGenerator(
- ICCServiceContext ccServiceCtx, Set<JobFlag> jobFlags) throws HyracksException;
+ public IActivityClusterGraphGenerator createActivityClusterGraphGenerator(ICCServiceContext ccServiceCtx,
+ Set<JobFlag> jobFlags) throws HyracksException;
public JobSpecification getJobSpecification();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobActivityGraph.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobActivityGraph.java
index de2759c..19d8484 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobActivityGraph.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobActivityGraph.java
@@ -55,7 +55,8 @@
connectorRecordDescriptorMap = new HashMap<ConnectorDescriptorId, RecordDescriptor>();
activityInputMap = new HashMap<ActivityId, List<IConnectorDescriptor>>();
activityOutputMap = new HashMap<ActivityId, List<IConnectorDescriptor>>();
- connectorActivityMap = new HashMap<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
+ connectorActivityMap =
+ new HashMap<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
blocked2blockerMap = new HashMap<ActivityId, Set<ActivityId>>();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobInfo.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobInfo.java
index 713219e..2073728 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobInfo.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobInfo.java
@@ -33,7 +33,8 @@
private final Map<OperatorDescriptorId, Map<Integer, String>> operatorLocations;
- public JobInfo(JobId jobId, JobStatus jobStatus, Map<OperatorDescriptorId, Map<Integer, String>> operatorLocations) {
+ public JobInfo(JobId jobId, JobStatus jobStatus,
+ Map<OperatorDescriptorId, Map<Integer, String>> operatorLocations) {
this.jobId = jobId;
this.operatorLocations = operatorLocations;
this.status = jobStatus;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSerializerDeserializerContainer.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSerializerDeserializerContainer.java
index d8c9a9c..7f3194e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSerializerDeserializerContainer.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSerializerDeserializerContainer.java
@@ -27,7 +27,8 @@
public class JobSerializerDeserializerContainer implements IJobSerializerDeserializerContainer {
private IJobSerializerDeserializer defaultJobSerDe = new JobSerializerDeserializer();
- private Map<DeploymentId, IJobSerializerDeserializer> jobSerializerDeserializerMap = new ConcurrentHashMap<DeploymentId, IJobSerializerDeserializer>();
+ private Map<DeploymentId, IJobSerializerDeserializer> jobSerializerDeserializerMap =
+ new ConcurrentHashMap<DeploymentId, IJobSerializerDeserializer>();
@Override
public synchronized IJobSerializerDeserializer getJobSerializerDeserializer(DeploymentId deploymentId) {
@@ -39,7 +40,8 @@
}
@Override
- public synchronized void addJobSerializerDeserializer(DeploymentId deploymentId, IJobSerializerDeserializer jobSerDe) {
+ public synchronized void addJobSerializerDeserializer(DeploymentId deploymentId,
+ IJobSerializerDeserializer jobSerDe) {
jobSerializerDeserializerMap.put(deploymentId, jobSerDe);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
index 7cdb300..f3059c1 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
@@ -48,8 +48,8 @@
* @author yingyib
*/
public class ActivityClusterGraphRewriter {
- private static final String ONE_TO_ONE_CONNECTOR = "org.apache.hyracks.dataflow.std.connectors."
- + "OneToOneConnectorDescriptor";
+ private static final String ONE_TO_ONE_CONNECTOR =
+ "org.apache.hyracks.dataflow.std.connectors." + "OneToOneConnectorDescriptor";
/**
* rewrite an activity cluster graph to eliminate
@@ -90,8 +90,8 @@
replacedBlockers = new HashSet<>();
for (ActivityId blocker : blockers) {
replacedBlockers.add(invertedAid2SuperAidMap.get(blocker));
- ActivityCluster dependingAc = ac.getActivityClusterGraph().getActivityMap()
- .get(invertedAid2SuperAidMap.get(blocker));
+ ActivityCluster dependingAc =
+ ac.getActivityClusterGraph().getActivityMap().get(invertedAid2SuperAidMap.get(blocker));
if (!ac.getDependencies().contains(dependingAc)) {
ac.getDependencies().add(dependingAc);
}
@@ -122,8 +122,8 @@
Map<ActivityId, IActivity> activities = ac.getActivityMap();
Map<ActivityId, List<IConnectorDescriptor>> activityInputMap = ac.getActivityInputMap();
Map<ActivityId, List<IConnectorDescriptor>> activityOutputMap = ac.getActivityOutputMap();
- Map<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> connectorActivityMap = ac
- .getConnectorActivityMap();
+ Map<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> connectorActivityMap =
+ ac.getConnectorActivityMap();
ActivityClusterGraph acg = ac.getActivityClusterGraph();
Map<ActivityId, IActivity> startActivities = new HashMap<>();
Map<ActivityId, SuperActivity> superActivities = new HashMap<>();
@@ -177,8 +177,8 @@
List<IConnectorDescriptor> outputConnectors = activityOutputMap.get(expendingActivity.getActivityId());
if (outputConnectors != null) {
for (IConnectorDescriptor outputConn : outputConnectors) {
- Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> endPoints = connectorActivityMap
- .get(outputConn.getConnectorId());
+ Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> endPoints =
+ connectorActivityMap.get(outputConn.getConnectorId());
IActivity newActivity = endPoints.getRight().getLeft();
SuperActivity existingSuperActivity = invertedActivitySuperActivityMap.get(newActivity);
if (outputConn.getClass().getName().contains(ONE_TO_ONE_CONNECTOR)) {
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/OneToOneConnectedActivityCluster.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/OneToOneConnectedActivityCluster.java
index 68041bb..a93cb17 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/OneToOneConnectedActivityCluster.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/OneToOneConnectedActivityCluster.java
@@ -38,10 +38,14 @@
private static final long serialVersionUID = 1L;
- protected final Map<Integer, Pair<ActivityId, Integer>> clusterInputIndexMap = new HashMap<Integer, Pair<ActivityId, Integer>>();
- protected final Map<Integer, Pair<ActivityId, Integer>> clusterOutputIndexMap = new HashMap<Integer, Pair<ActivityId, Integer>>();
- protected final Map<Pair<ActivityId, Integer>, Integer> invertedClusterOutputIndexMap = new HashMap<Pair<ActivityId, Integer>, Integer>();
- protected final Map<Pair<ActivityId, Integer>, Integer> invertedClusterInputIndexMap = new HashMap<Pair<ActivityId, Integer>, Integer>();
+ protected final Map<Integer, Pair<ActivityId, Integer>> clusterInputIndexMap =
+ new HashMap<Integer, Pair<ActivityId, Integer>>();
+ protected final Map<Integer, Pair<ActivityId, Integer>> clusterOutputIndexMap =
+ new HashMap<Integer, Pair<ActivityId, Integer>>();
+ protected final Map<Pair<ActivityId, Integer>, Integer> invertedClusterOutputIndexMap =
+ new HashMap<Pair<ActivityId, Integer>, Integer>();
+ protected final Map<Pair<ActivityId, Integer>, Integer> invertedClusterInputIndexMap =
+ new HashMap<Pair<ActivityId, Integer>, Integer>();
public OneToOneConnectedActivityCluster(ActivityClusterGraph acg, ActivityClusterId id) {
super(acg, id);
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/topology/TopologyDefinitionParser.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/topology/TopologyDefinitionParser.java
index 57330af..0182e0c 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/topology/TopologyDefinitionParser.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/topology/TopologyDefinitionParser.java
@@ -74,9 +74,10 @@
public void endElement(String uri, String localName, String qName) throws SAXException {
if ("network-switch".equals(localName) || "terminal".equals(localName)) {
ElementStackEntry e = stack.pop();
- NetworkEndpoint endpoint = e.type == EndpointType.NETWORK_SWITCH ? new NetworkSwitch(e.name,
- e.properties, e.ports.toArray(new NetworkSwitch.Port[e.ports.size()])) : new NetworkTerminal(
- e.name, e.properties);
+ NetworkEndpoint endpoint = e.type == EndpointType.NETWORK_SWITCH
+ ? new NetworkSwitch(e.name, e.properties,
+ e.ports.toArray(new NetworkSwitch.Port[e.ports.size()]))
+ : new NetworkTerminal(e.name, e.properties);
stack.peek().ports.add(new NetworkSwitch.Port(endpoint));
} else if ("property".equals(localName)) {
if (!inPropertyElement) {
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
index e5eec11..f396be9 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
@@ -31,7 +31,6 @@
private StringBuilder sb;
private Object lock1 = new Object();
-
// [Key: Job, Value: [Key: Operator, Value: Duration of each operators]]
private HashMap<String, LinkedHashMap<String, String>> spentTimePerJobMap;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExperimentProfilerUtils.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExperimentProfilerUtils.java
index 2305573..abadde3 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExperimentProfilerUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExperimentProfilerUtils.java
@@ -25,8 +25,8 @@
import java.io.OutputStream;
public class ExperimentProfilerUtils {
- public static void printToOutputFile(StringBuffer sb, FileOutputStream fos) throws IllegalStateException,
- IOException {
+ public static void printToOutputFile(StringBuffer sb, FileOutputStream fos)
+ throws IllegalStateException, IOException {
fos.write(sb.toString().getBytes());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/OperatorExecutionTimeProfiler.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/OperatorExecutionTimeProfiler.java
index 55c7915..02a1226 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/OperatorExecutionTimeProfiler.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/OperatorExecutionTimeProfiler.java
@@ -30,8 +30,8 @@
if (ExecutionTimeProfiler.PROFILE_MODE) {
//SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss-SSS");
try {
- executionTimeProfiler = new ExecutionTimeProfiler(profileHomeDir + "executionTime-"
- + Inet4Address.getLocalHost().getHostAddress() + ".txt");
+ executionTimeProfiler = new ExecutionTimeProfiler(
+ profileHomeDir + "executionTime-" + Inet4Address.getLocalHost().getHostAddress() + ".txt");
} catch (UnknownHostException e) {
e.printStackTrace();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SpatialIndexProfiler.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SpatialIndexProfiler.java
index a1b40d0..899d3ee 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SpatialIndexProfiler.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SpatialIndexProfiler.java
@@ -41,8 +41,9 @@
}
falsePositivePerQuery.begin();
try {
- cacheMissPerQuery = new ExperimentProfiler(PROFILE_HOME_DIR + "cacheMissPerQuery-"
- + Inet4Address.getLocalHost().getHostAddress() + ".txt", 1);
+ cacheMissPerQuery = new ExperimentProfiler(
+ PROFILE_HOME_DIR + "cacheMissPerQuery-" + Inet4Address.getLocalHost().getHostAddress() + ".txt",
+ 1);
} catch (UnknownHostException e) {
// TODO Auto-generated catch block
e.printStackTrace();
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/test/java/org/apache/hyracks/api/job/JobIdFactoryTest.java b/hyracks-fullstack/hyracks/hyracks-api/src/test/java/org/apache/hyracks/api/job/JobIdFactoryTest.java
index d16eb15..709f098 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/test/java/org/apache/hyracks/api/job/JobIdFactoryTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/test/java/org/apache/hyracks/api/job/JobIdFactoryTest.java
@@ -74,7 +74,7 @@
JobIdFactory factory = new JobIdFactory(ccId);
AtomicLong theId = (AtomicLong) idField.get(factory);
Assert.assertEquals(expected, theId.get());
- theId.set((((long)1 << 48) - 1) | expected);
+ theId.set((((long) 1 << 48) - 1) | expected);
JobId jobId = factory.create();
Assert.assertEquals(ccId, jobId.getCcId());
Assert.assertEquals(JobId.MAX_ID, jobId.getIdOnly());
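The test above forces the factory's counter to all-ones in its low 48 bits (((long) 1 << 48) - 1) and then checks that a freshly created id still reports the expected ccId and a MAX_ID id-only part. That only holds if the id packs the per-CC counter into the low 48 bits and the cluster-controller id into the high 16; a worked sketch of that packing, with the layout inferred from the test rather than taken from JobId itself:

    class JobIdBitsDemo {
        static final long ID_MASK = (1L << 48) - 1; // low 48 bits: per-CC counter

        static long pack(int ccId, long idOnly) {
            return ((long) ccId << 48) | (idOnly & ID_MASK);
        }

        public static void main(String[] args) {
            long jobId = pack(2, ID_MASK); // counter saturated, as in the test
            System.out.println(jobId >>> 48);    // 2: the ccId survives
            System.out.println(jobId & ID_MASK); // 281474976710655, the maximal 48-bit id
        }
    }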
diff --git a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java
index 4310cd0..7eeb913 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java
@@ -40,16 +40,16 @@
@Override
public Status getDatasetResultStatus(JobId jobId, ResultSetId rsId) throws Exception {
- HyracksClientInterfaceFunctions.GetDatasetResultStatusFunction gdrlf = new HyracksClientInterfaceFunctions.GetDatasetResultStatusFunction(
- jobId, rsId);
+ HyracksClientInterfaceFunctions.GetDatasetResultStatusFunction gdrlf =
+ new HyracksClientInterfaceFunctions.GetDatasetResultStatusFunction(jobId, rsId);
return (Status) rpci.call(ipcHandle, gdrlf);
}
@Override
public DatasetDirectoryRecord[] getDatasetResultLocations(JobId jobId, ResultSetId rsId,
DatasetDirectoryRecord[] knownRecords) throws Exception {
- HyracksClientInterfaceFunctions.GetDatasetResultLocationsFunction gdrlf = new HyracksClientInterfaceFunctions.GetDatasetResultLocationsFunction(
- jobId, rsId, knownRecords);
+ HyracksClientInterfaceFunctions.GetDatasetResultLocationsFunction gdrlf =
+ new HyracksClientInterfaceFunctions.GetDatasetResultLocationsFunction(jobId, rsId, knownRecords);
return (DatasetDirectoryRecord[]) rpci.call(ipcHandle, gdrlf);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java
index fc5708d..e7c9042 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java
@@ -75,8 +75,7 @@
public HyracksDatasetReader(IHyracksDatasetDirectoryServiceConnection datasetDirectoryServiceConnection,
ClientNetworkManager netManager, IHyracksCommonContext datasetClientCtx, JobId jobId,
- ResultSetId resultSetId)
- throws Exception {
+ ResultSetId resultSetId) throws Exception {
this.datasetDirectoryServiceConnection = datasetDirectoryServiceConnection;
this.netManager = netManager;
this.datasetClientCtx = datasetClientCtx;
@@ -105,8 +104,8 @@
private DatasetDirectoryRecord getRecord(int partition) throws Exception {
while (knownRecords == null || knownRecords[partition] == null) {
- knownRecords = datasetDirectoryServiceConnection
- .getDatasetResultLocations(jobId, resultSetId, knownRecords);
+ knownRecords =
+ datasetDirectoryServiceConnection.getDatasetResultLocations(jobId, resultSetId, knownRecords);
}
return knownRecords[partition];
}
@@ -157,7 +156,7 @@
readBuffer = resultChannel.getNextBuffer();
lastMonitor.notifyFrameRead();
if (readBuffer != null) {
- if (readSize <=0) {
+ if (readSize <= 0) {
int nBlocks = FrameHelper.deserializeNumOfMinFrame(readBuffer);
frame.ensureFrameSize(frame.getMinSize() * nBlocks);
frame.getBuffer().clear();
diff --git a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/stats/impl/ClientCounterContext.java b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/stats/impl/ClientCounterContext.java
index 5d0865c..9e87f52 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/stats/impl/ClientCounterContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/stats/impl/ClientCounterContext.java
@@ -39,9 +39,9 @@
* @author yingyib
*/
public class ClientCounterContext implements IClusterCounterContext {
- private static String[] RESET_COUNTERS = { Counters.NETWORK_IO_READ, Counters.NETWORK_IO_WRITE,
- Counters.MEMORY_USAGE, Counters.MEMORY_MAX, Counters.DISK_READ, Counters.DISK_WRITE,
- Counters.NUM_PROCESSOR };
+ private static String[] RESET_COUNTERS =
+ { Counters.NETWORK_IO_READ, Counters.NETWORK_IO_WRITE, Counters.MEMORY_USAGE, Counters.MEMORY_MAX,
+ Counters.DISK_READ, Counters.DISK_WRITE, Counters.NUM_PROCESSOR };
private static String[] AGG_COUNTERS = { Counters.SYSTEM_LOAD };
private static int UPDATE_INTERVAL = 10000;
diff --git a/hyracks-fullstack/hyracks/hyracks-client/src/test/java/org/apache/hyracks/client/stats/ClientCounterContextTest.java b/hyracks-fullstack/hyracks/hyracks-client/src/test/java/org/apache/hyracks/client/stats/ClientCounterContextTest.java
index ee33a6d..8bac8e7 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/src/test/java/org/apache/hyracks/client/stats/ClientCounterContextTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-client/src/test/java/org/apache/hyracks/client/stats/ClientCounterContextTest.java
@@ -37,9 +37,9 @@
synchronized (this) {
wait(20000);
}
- String[] counters = { Counters.MEMORY_USAGE, Counters.MEMORY_MAX, Counters.NETWORK_IO_READ,
- Counters.NETWORK_IO_WRITE, Counters.SYSTEM_LOAD, Counters.NUM_PROCESSOR, Counters.DISK_READ,
- Counters.DISK_WRITE };
+ String[] counters =
+ { Counters.MEMORY_USAGE, Counters.MEMORY_MAX, Counters.NETWORK_IO_READ, Counters.NETWORK_IO_WRITE,
+ Counters.SYSTEM_LOAD, Counters.NUM_PROCESSOR, Counters.DISK_READ, Counters.DISK_WRITE };
for (String counterName : counters) {
ICounter counter = ccContext.getCounter(counterName, false);
System.out.println(counterName + ": " + counter.get());
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java
index ccf798a..e46aa7f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java
@@ -94,9 +94,8 @@
case DESTROY_JOB:
HyracksClientInterfaceFunctions.UndeployJobSpecFunction dsjf =
(HyracksClientInterfaceFunctions.UndeployJobSpecFunction) fn;
- ccs.getWorkQueue()
- .schedule(new UndeployJobSpecWork(ccs, dsjf.getDeployedJobSpecId(),
- new IPCResponder<>(handle, mid)));
+ ccs.getWorkQueue().schedule(
+ new UndeployJobSpecWork(ccs, dsjf.getDeployedJobSpecId(), new IPCResponder<>(handle, mid)));
break;
case CANCEL_JOB:
HyracksClientInterfaceFunctions.CancelJobFunction cjf =
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java
index ad0cb61..06c92dd 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java
@@ -101,8 +101,7 @@
case DEPLOYED_JOB_FAILURE:
CCNCFunctions.ReportDeployedJobSpecFailureFunction rdjf =
(CCNCFunctions.ReportDeployedJobSpecFailureFunction) fn;
- ccs.getWorkQueue()
- .schedule(new DeployedJobFailureWork(rdjf.getDeployedJobSpecId(), rdjf.getNodeId()));
+ ccs.getWorkQueue().schedule(new DeployedJobFailureWork(rdjf.getDeployedJobSpecId(), rdjf.getNodeId()));
break;
case REGISTER_PARTITION_PROVIDER:
CCNCFunctions.RegisterPartitionProviderFunction rppf =
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
index 1ec7485..a6edd70 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
@@ -171,8 +171,8 @@
final ClusterTopology topology = computeClusterTopology(ccConfig);
ccContext = new ClusterControllerContext(topology);
sweeper = new DeadNodeSweeper();
- datasetDirectoryService = new DatasetDirectoryService(ccConfig.getResultTTL(),
- ccConfig.getResultSweepThreshold());
+ datasetDirectoryService =
+ new DatasetDirectoryService(ccConfig.getResultTTL(), ccConfig.getResultSweepThreshold());
deploymentRunMap = new HashMap<>();
stateDumpRunMap = new HashMap<>();
@@ -208,9 +208,9 @@
clusterIPC = new IPCSystem(new InetSocketAddress(ccConfig.getClusterListenPort()), ccIPCI,
new CCNCFunctions.SerializerDeserializer());
IIPCI ciIPCI = new ClientInterfaceIPCI(this, jobIdFactory);
- clientIPC = new IPCSystem(
- new InetSocketAddress(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort()), ciIPCI,
- new JavaSerializationBasedPayloadSerializerDeserializer());
+ clientIPC =
+ new IPCSystem(new InetSocketAddress(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort()),
+ ciIPCI, new JavaSerializationBasedPayloadSerializerDeserializer());
webServer = new WebServer(this, ccConfig.getConsoleListenPort());
clusterIPC.start();
clientIPC.start();
@@ -238,9 +238,9 @@
// Job manager is in charge of job lifecycle management.
try {
- Constructor<?> jobManagerConstructor = this.getClass().getClassLoader()
- .loadClass(ccConfig.getJobManagerClass())
- .getConstructor(CCConfig.class, ClusterControllerService.class, IJobCapacityController.class);
+ Constructor<?> jobManagerConstructor =
+ this.getClass().getClassLoader().loadClass(ccConfig.getJobManagerClass()).getConstructor(
+ CCConfig.class, ClusterControllerService.class, IJobCapacityController.class);
jobManager = (IJobManager) jobManagerConstructor.newInstance(ccConfig, this, jobCapacityController);
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException
| InvocationTargetException e) {
@@ -272,8 +272,8 @@
private void connectNCs() {
getNCServices().forEach((key, value) -> {
- final TriggerNCWork triggerWork = new TriggerNCWork(ClusterControllerService.this, value.getHostString(),
- value.getPort(), key);
+ final TriggerNCWork triggerWork =
+ new TriggerNCWork(ClusterControllerService.this, value.getHostString(), value.getPort(), key);
executor.submit(triggerWork);
});
}
@@ -428,8 +428,8 @@
@Override
public void getIPAddressNodeMap(Map<InetAddress, Set<String>> map) throws HyracksDataException {
- GetIpAddressNodeNameMapWork ginmw = new GetIpAddressNodeNameMapWork(
- ClusterControllerService.this.getNodeManager(), map);
+ GetIpAddressNodeNameMapWork ginmw =
+ new GetIpAddressNodeNameMapWork(ClusterControllerService.this.getNodeManager(), map);
try {
workQueue.scheduleAndSync(ginmw);
} catch (Exception e) {
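The re-wrapped constructor lookup above (loadClass on ccConfig.getJobManagerClass(), then getConstructor with a fixed signature) is the usual recipe for a pluggable implementation selected by configuration. A self-contained sketch with placeholder types:

    import java.lang.reflect.Constructor;

    class PluggableDemo {
        interface JobManager {
        }

        public static class DefaultJobManager implements JobManager {
            public DefaultJobManager(String ccName) {
            }
        }

        public static void main(String[] args) throws Exception {
            // The real service reads this name from ccConfig.getJobManagerClass().
            String className = PluggableDemo.class.getName() + "$DefaultJobManager";
            Constructor<?> ctor = Class.forName(className).getConstructor(String.class);
            JobManager jm = (JobManager) ctor.newInstance("cc-1");
            System.out.println(jm.getClass().getSimpleName()); // DefaultJobManager
        }
    }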
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/DeployedJobSpecStore.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/DeployedJobSpecStore.java
index 1a3051e..0e22c25 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/DeployedJobSpecStore.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/DeployedJobSpecStore.java
@@ -38,9 +38,8 @@
}
public void addDeployedJobSpecDescriptor(DeployedJobSpecId deployedJobSpecId,
- ActivityClusterGraph activityClusterGraph,
- JobSpecification jobSpecification, Set<Constraint> activityClusterGraphConstraints)
- throws HyracksException {
+ ActivityClusterGraph activityClusterGraph, JobSpecification jobSpecification,
+ Set<Constraint> activityClusterGraphConstraints) throws HyracksException {
if (deployedJobSpecDescriptorMap.get(deployedJobSpecId) != null) {
throw HyracksException.create(ErrorCode.DUPLICATE_DEPLOYED_JOB, deployedJobSpecId);
}
@@ -80,8 +79,8 @@
private final Set<Constraint> activityClusterGraphConstraints;
- private DeployedJobSpecDescriptor(ActivityClusterGraph activityClusterGraph,
- JobSpecification jobSpecification, Set<Constraint> activityClusterGraphConstraints) {
+ private DeployedJobSpecDescriptor(ActivityClusterGraph activityClusterGraph, JobSpecification jobSpecification,
+ Set<Constraint> activityClusterGraphConstraints) {
this.activityClusterGraph = activityClusterGraph;
this.jobSpecification = jobSpecification;
this.activityClusterGraphConstraints = activityClusterGraphConstraints;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
index 742e2e0..98cf67a 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
@@ -98,8 +98,7 @@
}
// Updates the node registry.
if (nodeRegistry.containsKey(nodeId)) {
- LOGGER.warn(
- "Node with name " + nodeId + " has already registered; failing the node then re-registering.");
+ LOGGER.warn("Node with name " + nodeId + " has already registered; failing the node then re-registering.");
removeDeadNode(nodeId);
} else {
try {
@@ -150,8 +149,8 @@
Set<String> deadNodes = new HashSet<>();
Set<JobId> affectedJobIds = new HashSet<>();
Iterator<Map.Entry<String, NodeControllerState>> nodeIterator = nodeRegistry.entrySet().iterator();
- long deadNodeNanosThreshold = TimeUnit.MILLISECONDS
- .toNanos(ccConfig.getHeartbeatMaxMisses() * ccConfig.getHeartbeatPeriodMillis());
+ long deadNodeNanosThreshold =
+ TimeUnit.MILLISECONDS.toNanos(ccConfig.getHeartbeatMaxMisses() * ccConfig.getHeartbeatPeriodMillis());
while (nodeIterator.hasNext()) {
Map.Entry<String, NodeControllerState> entry = nodeIterator.next();
String nodeId = entry.getKey();
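The NodeManager hunk above reflows the dead-node threshold: a node is declared dead after heartbeatMaxMisses periods pass with no heartbeat, converted to nanoseconds for comparison against monotonic timestamps. A small sketch with assumed config values:

    import java.util.concurrent.TimeUnit;

    public final class HeartbeatThreshold {
        public static void main(String[] args) {
            long heartbeatPeriodMillis = 10_000; // assumed config values for illustration
            int heartbeatMaxMisses = 5;
            // Dead after maxMisses * period with no heartbeat, expressed in nanos.
            long deadNodeNanosThreshold =
                    TimeUnit.MILLISECONDS.toNanos(heartbeatMaxMisses * heartbeatPeriodMillis);
            System.out.println(deadNodeNanosThreshold); // 50000000000
        }
    }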
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
index ea37cdd..3fe88bf 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
@@ -102,12 +102,10 @@
ActivityCluster dAC = ac.getActivityClusterGraph().getActivityMap().get(danId);
ActivityClusterPlan dACP = jobRun.getActivityClusterPlanMap().get(dAC.getId());
assert dACP != null : "IllegalStateEncountered: Dependent AC is being planned without a plan for "
- + "dependency AC: Encountered no plan for ActivityID "
- + danId;
+ + "dependency AC: Encountered no plan for ActivityID " + danId;
Task[] dATasks = dACP.getActivityPlanMap().get(danId).getTasks();
assert dATasks != null : "IllegalStateEncountered: Dependent AC is being planned without a plan for"
- + " dependency AC: Encountered no plan for ActivityID "
- + danId;
+ + " dependency AC: Encountered no plan for ActivityID " + danId;
assert dATasks.length == tasks.length : "Dependency activity partitioned differently from "
+ "dependent: " + dATasks.length + " != " + tasks.length;
Task dTask = dATasks[i];
@@ -125,8 +123,8 @@
private TaskCluster[] computeTaskClusters(ActivityCluster ac, JobRun jobRun,
Map<ActivityId, ActivityPlan> activityPlanMap) {
Set<ActivityId> activities = ac.getActivityMap().keySet();
- Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity = computeTaskConnectivity(jobRun,
- activityPlanMap, activities);
+ Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity =
+ computeTaskConnectivity(jobRun, activityPlanMap, activities);
TaskCluster[] taskClusters = ac.getActivityClusterGraph().isUseConnectorPolicyForScheduling()
? buildConnectorPolicyAwareTaskClusters(ac, activityPlanMap, taskConnectivity)
@@ -139,13 +137,13 @@
List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity.get(tid);
if (cInfoList != null) {
for (Pair<TaskId, ConnectorDescriptorId> p : cInfoList) {
- Task targetTS = activityPlanMap.get(p.getLeft().getActivityId()).getTasks()[p.getLeft()
- .getPartition()];
+ Task targetTS =
+ activityPlanMap.get(p.getLeft().getActivityId()).getTasks()[p.getLeft().getPartition()];
TaskCluster targetTC = targetTS.getTaskCluster();
if (targetTC != tc) {
ConnectorDescriptorId cdId = p.getRight();
- PartitionId pid = new PartitionId(jobRun.getJobId(), cdId, tid.getPartition(), p.getLeft()
- .getPartition());
+ PartitionId pid = new PartitionId(jobRun.getJobId(), cdId, tid.getPartition(),
+ p.getLeft().getPartition());
tc.getProducedPartitions().add(pid);
targetTC.getRequiredPartitions().add(pid);
partitionProducingTaskClusterMap.put(pid, tc);
@@ -170,8 +168,8 @@
Task[] tasks = ap.getTasks();
taskStates.addAll(Arrays.asList(tasks));
}
- TaskCluster tc = new TaskCluster(new TaskClusterId(ac.getId(), 0), ac, taskStates.toArray(new Task[taskStates
- .size()]));
+ TaskCluster tc =
+ new TaskCluster(new TaskClusterId(ac.getId(), 0), ac, taskStates.toArray(new Task[taskStates.size()]));
for (Task t : tc.getTasks()) {
t.setTaskCluster(tc);
}
@@ -209,8 +207,8 @@
}
for (int i = 0; i < nProducers; ++i) {
c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
- List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity
- .get(ac1TaskStates[i].getTaskId());
+ List<Pair<TaskId, ConnectorDescriptorId>> cInfoList =
+ taskConnectivity.get(ac1TaskStates[i].getTaskId());
if (cInfoList == null) {
cInfoList = new ArrayList<>();
taskConnectivity.put(ac1TaskStates[i].getTaskId(), cInfoList);
@@ -358,9 +356,9 @@
int[] fanouts = new int[nProducers];
if (c.allProducersToAllConsumers()) {
- for (int i = 0; i < nProducers; ++i) {
- fanouts[i] = nConsumers;
- }
+ for (int i = 0; i < nProducers; ++i) {
+ fanouts[i] = nConsumers;
+ }
} else {
for (int i = 0; i < nProducers; ++i) {
c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
@@ -402,8 +400,8 @@
throw new HyracksException("No value found for " + lv);
}
if (!(value instanceof Number)) {
- throw new HyracksException("Unexpected type of value bound to " + lv + ": " + value.getClass() + "("
- + value + ")");
+ throw new HyracksException(
+ "Unexpected type of value bound to " + lv + ": " + value.getClass() + "(" + value + ")");
}
int nParts = ((Number) value).intValue();
if (nParts <= 0) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
index 58f44ef..fa08420 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
@@ -98,8 +98,8 @@
private Map<OperatorDescriptorId, Map<Integer, String>> operatorLocations;
- private JobRun(DeploymentId deploymentId, JobId jobId, Set<JobFlag> jobFlags,
- JobSpecification spec, ActivityClusterGraph acg) {
+ private JobRun(DeploymentId deploymentId, JobId jobId, Set<JobFlag> jobFlags, JobSpecification spec,
+ ActivityClusterGraph acg) {
this.deploymentId = deploymentId;
this.jobId = jobId;
this.jobFlags = jobFlags;
@@ -118,10 +118,9 @@
//Run a deployed job spec
public JobRun(ClusterControllerService ccs, DeploymentId deploymentId, JobId jobId, Set<JobFlag> jobFlags,
DeployedJobSpecDescriptor deployedJobSpecDescriptor, Map<byte[], byte[]> jobParameters,
- DeployedJobSpecId deployedJobSpecId)
- throws HyracksException {
- this(deploymentId, jobId, jobFlags,
- deployedJobSpecDescriptor.getJobSpecification(), deployedJobSpecDescriptor.getActivityClusterGraph());
+ DeployedJobSpecId deployedJobSpecId) throws HyracksException {
+ this(deploymentId, jobId, jobFlags, deployedJobSpecDescriptor.getJobSpecification(),
+ deployedJobSpecDescriptor.getActivityClusterGraph());
ccs.createOrGetJobParameterByteStore(jobId).setParameters(jobParameters);
Set<Constraint> constaints = deployedJobSpecDescriptor.getActivityClusterGraphConstraints();
this.scheduler = new JobExecutor(ccs, this, constaints, deployedJobSpecId);
@@ -252,7 +251,7 @@
return connectorPolicyMap;
}
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode result = om.createObjectNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java
index c5e51a6..6f5c5ad 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java
@@ -49,7 +49,8 @@
public List<Pair<PartitionDescriptor, PartitionRequest>> registerPartitionDescriptor(
PartitionDescriptor partitionDescriptor) {
- List<Pair<PartitionDescriptor, PartitionRequest>> matches = new ArrayList<Pair<PartitionDescriptor, PartitionRequest>>();
+ List<Pair<PartitionDescriptor, PartitionRequest>> matches =
+ new ArrayList<Pair<PartitionDescriptor, PartitionRequest>>();
PartitionId pid = partitionDescriptor.getPartitionId();
boolean matched = false;
List<PartitionRequest> requests = partitionRequests.get(pid);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestUtil.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestUtil.java
index 69f0571..9430c0d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestUtil.java
@@ -23,7 +23,8 @@
public class JSONOutputRequestUtil {
- private JSONOutputRequestUtil() {}
+ private JSONOutputRequestUtil() {
+ }
public static URI uri(String host, String prefix, String path) throws URISyntaxException {
String name = host;
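The template expands the empty private constructor onto its own two lines; the idiom itself is the standard way to keep a static-only utility class from being instantiated. A tiny sketch (UriUtil and join are made-up names):

    public final class UriUtil {
        private UriUtil() {
            // static-only utility class; never instantiated
        }

        public static String join(String host, String path) {
            return host + "/" + path;
        }
    }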
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/AbstractTaskLifecycleWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/AbstractTaskLifecycleWork.java
index 3babf00..d0c6567 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/AbstractTaskLifecycleWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/AbstractTaskLifecycleWork.java
@@ -58,8 +58,8 @@
Map<ActivityId, ActivityCluster> activityClusterMap = run.getActivityClusterGraph().getActivityMap();
ActivityCluster ac = activityClusterMap.get(tid.getActivityId());
if (ac != null) {
- Map<ActivityId, ActivityPlan> taskStateMap = run.getActivityClusterPlanMap().get(ac.getId())
- .getActivityPlanMap();
+ Map<ActivityId, ActivityPlan> taskStateMap =
+ run.getActivityClusterPlanMap().get(ac.getId()).getActivityPlanMap();
Task[] taskStates = taskStateMap.get(tid.getActivityId()).getTasks();
if (taskStates != null && taskStates.length > tid.getPartition()) {
Task ts = taskStates[tid.getPartition()];
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/CliDeployBinaryWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/CliDeployBinaryWork.java
index c0ecffb..4962607 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/CliDeployBinaryWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/CliDeployBinaryWork.java
@@ -65,8 +65,8 @@
/**
* Deploy for the cluster controller
*/
- DeploymentUtils.deploy(deploymentId, binaryURLs, ccs.getContext()
- .getJobSerializerDeserializerContainer(), ccs.getServerContext(), false);
+ DeploymentUtils.deploy(deploymentId, binaryURLs, ccs.getContext().getJobSerializerDeserializerContainer(),
+ ccs.getServerContext(), false);
/**
* Deploy for the node controllers
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java
index b44c58c..a7c3c2f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java
@@ -41,7 +41,7 @@
private final IResultCallback<Boolean> callback;
public ClusterShutdownWork(ClusterControllerService ncs, boolean terminateNCService,
- IResultCallback<Boolean> callback) {
+ IResultCallback<Boolean> callback) {
this.ccs = ncs;
this.terminateNCService = terminateNCService;
this.callback = callback;
@@ -77,8 +77,8 @@
/*
* best effort - just exit, user will have to kill misbehaving NCs
*/
- LOGGER.error("Clean shutdown of NCs timed out- giving up; unresponsive nodes: " +
- shutdownStatus.getRemainingNodes());
+ LOGGER.error("Clean shutdown of NCs timed out- giving up; unresponsive nodes: "
+ + shutdownStatus.getRemainingNodes());
}
callback.setValue(cleanShutdown);
ccs.stop(terminateNCService);
@@ -97,8 +97,8 @@
LOGGER.info("Notifying NC " + nodeId + " to shutdown...");
ncState.getNodeController().shutdown(terminateNCService);
} catch (Exception e) {
- LOGGER.log(Level.INFO,
- "Exception shutting down NC " + nodeId + " (possibly dead?), continuing shutdown...", e);
+ LOGGER.log(Level.INFO, "Exception shutting down NC " + nodeId + " (possibly dead?), continuing shutdown...",
+ e);
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/DeployJobSpecWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/DeployJobSpecWork.java
index f7335a8..c51f3c5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/DeployJobSpecWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/DeployJobSpecWork.java
@@ -59,8 +59,7 @@
acggf.createActivityClusterGraphGenerator(ccServiceCtx, EnumSet.noneOf(JobFlag.class));
ActivityClusterGraph acg = acgg.initialize();
ccs.getDeployedJobSpecStore().addDeployedJobSpecDescriptor(deployedJobSpecId, acg,
- acggf.getJobSpecification(),
- acgg.getConstraints());
+ acggf.getJobSpecification(), acgg.getConstraints());
byte[] acgBytes = JavaSerializationUtils.serialize(acg);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobInfoWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobInfoWork.java
index 8fe6470..009e445 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobInfoWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobInfoWork.java
@@ -40,8 +40,8 @@
protected void doRun() throws Exception {
try {
JobRun run = jobManager.get(jobId);
- JobInfo info = (run != null) ? new JobInfo(run.getJobId(), run.getStatus(), run.getOperatorLocations())
- : null;
+ JobInfo info =
+ (run != null) ? new JobInfo(run.getJobId(), run.getStatus(), run.getOperatorLocations()) : null;
callback.setValue(info);
} catch (Exception e) {
callback.setException(e);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobSummariesJSONWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobSummariesJSONWork.java
index 9c680c3..ccd8286 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobSummariesJSONWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobSummariesJSONWork.java
@@ -46,7 +46,7 @@
populateJSON(jobManager.getArchivedJobs());
}
- private void populateJSON(Collection<JobRun> jobRuns) {
+ private void populateJSON(Collection<JobRun> jobRuns) {
ObjectMapper om = new ObjectMapper();
for (JobRun run : jobRuns) {
ObjectNode jo = om.createObjectNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
index 6433223..517f56f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
@@ -44,8 +44,8 @@
import org.apache.hyracks.control.common.work.SynchronizableWork;
public class GetNodeDetailsJSONWork extends SynchronizableWork {
- private static final Section [] CC_SECTIONS = { Section.CC, Section.COMMON };
- private static final Section [] NC_SECTIONS = { Section.NC, Section.COMMON };
+ private static final Section[] CC_SECTIONS = { Section.CC, Section.COMMON };
+ private static final Section[] NC_SECTIONS = { Section.NC, Section.COMMON };
private final INodeManager nodeManager;
private final CCConfig ccConfig;
@@ -153,7 +153,6 @@
return o;
}
-
public ObjectNode getDetail() {
return detail;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
index e1b59e1..b064e52 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
@@ -42,7 +42,6 @@
private final IResultCallback<String> callback;
private final ThreadDumpRun run;
-
public GetThreadDumpWork(ClusterControllerService ccs, String nodeId, IResultCallback<String> callback) {
this.ccs = ccs;
this.nodeId = nodeId;
@@ -83,8 +82,8 @@
Thread.sleep(sleepTime);
}
if (ccs.removeThreadDumpRun(run.getRequestId()) != null) {
- LOGGER.log(Level.WARN, "Timed out thread dump request " + run.getRequestId()
- + " for node " + nodeId);
+ LOGGER.log(Level.WARN,
+ "Timed out thread dump request " + run.getRequestId() + " for node " + nodeId);
callback.setException(new TimeoutException("Thread dump request for node " + nodeId
+ " timed out after " + TIMEOUT_SECS + " seconds."));
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java
index b3b33c9..cc37f9c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java
@@ -58,8 +58,8 @@
}
Set<String> cleanupPendingNodes = run.getCleanupPendingNodeIds();
if (!cleanupPendingNodes.remove(nodeId)) {
- LOGGER.log(Level.WARN, () -> nodeId + " not in pending cleanup nodes set: " + cleanupPendingNodes +
- " for job " + jobId);
+ LOGGER.log(Level.WARN,
+ () -> nodeId + " not in pending cleanup nodes set: " + cleanupPendingNodes + " for job " + jobId);
return;
}
INodeManager nodeManager = ccs.getNodeManager();
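The reformatted warning above hands the logger a lambda, so the message string is only concatenated when WARN is actually enabled. A JDK-only sketch of the same idea (java.util.logging here rather than the Hyracks logger; nodeId and jobId are assumed values):

    import java.util.logging.Level;
    import java.util.logging.Logger;

    public final class LazyLogDemo {
        private static final Logger LOGGER = Logger.getLogger(LazyLogDemo.class.getName());

        public static void main(String[] args) {
            String nodeId = "nc1"; // illustrative values
            String jobId = "JID:42";
            // The Supplier form defers string building until the level is known to be loggable.
            LOGGER.log(Level.WARNING, () -> nodeId + " not in pending cleanup nodes set for job " + jobId);
        }
    }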
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
index 04a34af..77ecbee 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
@@ -56,9 +56,8 @@
Map<IOption, Object> ncConfiguration = new HashMap<>();
try {
LOGGER.log(Level.WARN, "Registering INodeController: id = " + id);
- NodeControllerRemoteProxy nc =
- new NodeControllerRemoteProxy(ccs.getCcId(),
- ccs.getClusterIPC().getReconnectingHandle(reg.getNodeControllerAddress()));
+ NodeControllerRemoteProxy nc = new NodeControllerRemoteProxy(ccs.getCcId(),
+ ccs.getClusterIPC().getReconnectingHandle(reg.getNodeControllerAddress()));
NodeControllerState state = new NodeControllerState(nc, reg);
INodeManager nodeManager = ccs.getNodeManager();
nodeManager.addNode(id, state);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterPartitionAvailibilityWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterPartitionAvailibilityWork.java
index edc57fb..23a81af 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterPartitionAvailibilityWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterPartitionAvailibilityWork.java
@@ -49,8 +49,8 @@
return;
}
PartitionMatchMaker pmm = run.getPartitionMatchMaker();
- List<Pair<PartitionDescriptor, PartitionRequest>> matches = pmm
- .registerPartitionDescriptor(partitionDescriptor);
+ List<Pair<PartitionDescriptor, PartitionRequest>> matches =
+ pmm.registerPartitionDescriptor(partitionDescriptor);
for (Pair<PartitionDescriptor, PartitionRequest> match : matches) {
try {
PartitionUtils.reportPartitionMatch(ccs, pid, match);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java
index aef331f..ad8882d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java
@@ -47,6 +47,7 @@
this.ncPort = ncPort;
this.ncId = ncId;
}
+
@Override
public final void doRun() {
LOGGER.info("Connecting to NC service '" + ncId + "' at " + ncHost + ":" + ncPort);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
index 2f80f5b..aa7a4fe 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
@@ -54,6 +54,7 @@
this.ncPort = ncPort;
this.ncId = ncId;
}
+
@Override
public final void run() {
ccs.getExecutor().execute(() -> {
@@ -68,8 +69,8 @@
return;
// QQQ Should probably have an ACK here
} catch (IOException e) {
- LOGGER.log(Level.WARN, "Failed to contact NC service at " + ncHost + ":" + ncPort
- + "; will retry", e);
+ LOGGER.log(Level.WARN, "Failed to contact NC service at " + ncHost + ":" + ncPort + "; will retry",
+ e);
}
try {
Thread.sleep(5000);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ConfigManagerApplicationConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ConfigManagerApplicationConfig.java
index 92e90e7..44f57fa 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ConfigManagerApplicationConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ConfigManagerApplicationConfig.java
@@ -42,17 +42,17 @@
@Override
public String getString(String section, String key) {
- return (String)get(section, key);
+ return (String) get(section, key);
}
@Override
public int getInt(String section, String key) {
- return (int)get(section, key);
+ return (int) get(section, key);
}
@Override
public long getLong(String section, String key) {
- return (long)get(section, key);
+ return (long) get(section, key);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/IClusterController.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/IClusterController.java
index 2307185..9cf84dd 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/IClusterController.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/IClusterController.java
@@ -40,8 +40,7 @@
void unregisterNode(String nodeId) throws Exception;
- void notifyTaskComplete(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics)
- throws Exception;
+ void notifyTaskComplete(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics) throws Exception;
void notifyTaskFailure(JobId jobId, TaskAttemptId taskId, String nodeId, List<Exception> exceptions)
throws Exception;
@@ -66,8 +65,8 @@
void sendApplicationMessageToCC(byte[] data, DeploymentId deploymentId, String nodeId) throws Exception;
- void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult,
- boolean emptyResult, int partition, int nPartitions, NetworkAddress networkAddress) throws Exception;
+ void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult, boolean emptyResult,
+ int partition, int nPartitions, NetworkAddress networkAddress) throws Exception;
void reportResultPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition) throws Exception;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/INodeController.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/INodeController.java
index ef3b27c..9ec55f4 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/INodeController.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/INodeController.java
@@ -37,8 +37,8 @@
public interface INodeController {
void startTasks(DeploymentId deploymentId, JobId jobId, byte[] planBytes,
- List<TaskAttemptDescriptor> taskDescriptors, Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies,
- Set<JobFlag> flags, Map<byte[], byte[]> jobParameters, DeployedJobSpecId deployedJobSpecId)
+ List<TaskAttemptDescriptor> taskDescriptors, Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies,
+ Set<JobFlag> flags, Map<byte[], byte[]> jobParameters, DeployedJobSpecId deployedJobSpecId)
throws Exception;
void abortTasks(JobId jobId, List<TaskAttemptId> tasks) throws Exception;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
index 67ea33f..986ca96 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
@@ -69,8 +69,8 @@
private HashSet<IOption> registeredOptions = new HashSet<>();
private HashMap<IOption, Object> definedMap = new HashMap<>();
private HashMap<IOption, Object> defaultMap = new HashMap<>();
- private CompositeMap<IOption, Object> configurationMap = new CompositeMap<>(definedMap, defaultMap,
- new NoOpMapMutator());
+ private CompositeMap<IOption, Object> configurationMap =
+ new CompositeMap<>(definedMap, defaultMap, new NoOpMapMutator());
private EnumMap<Section, Map<String, IOption>> sectionMap = new EnumMap<>(Section.class);
@SuppressWarnings("squid:S1948") // TreeMap is serializable, and therefore so is its synchronized map
private Map<String, Map<IOption, Object>> nodeSpecificMap = Collections.synchronizedMap(new TreeMap<>());
@@ -474,8 +474,7 @@
}
});
extensionOptions.forEach((extension, options) -> {
- options.forEach(option -> ini
- .add(extension, option.getKey(), option.getValue()));
+ options.forEach(option -> ini.add(extension, option.getKey(), option.getValue()));
});
return ini;
}
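For context on the CompositeMap being reformatted above: explicitly defined options shadow defaults because the defined map is listed first. A plain-JDK sketch of that precedence rule (the key and the defined value are illustrative; 4096 is the JOB_QUEUE_CAPACITY default visible in the CCConfig hunk below):

    import java.util.HashMap;
    import java.util.Map;

    public final class LayeredConfigDemo {
        public static void main(String[] args) {
            Map<String, Object> defaults = new HashMap<>();
            Map<String, Object> defined = new HashMap<>();
            defaults.put("job.queue.capacity", 4096);
            defined.put("job.queue.capacity", 1024); // explicitly configured value wins
            // Lookup order mirrors CompositeMap(defined, defaults): defined first, then defaults.
            Object value = defined.containsKey("job.queue.capacity")
                    ? defined.get("job.queue.capacity") : defaults.get("job.queue.capacity");
            System.out.println(value); // 1024
        }
    }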
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigUtils.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigUtils.java
index adf1774..4fa9b56 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigUtils.java
@@ -150,25 +150,23 @@
return value;
}
- public static String getString(Ini ini, org.apache.hyracks.api.config.Section section,
- IOption option, String defaultValue) {
+ public static String getString(Ini ini, org.apache.hyracks.api.config.Section section, IOption option,
+ String defaultValue) {
return getString(ini, section.sectionName(), option.ini(), defaultValue);
}
public static void addConfigToJSON(ObjectNode o, IApplicationConfig cfg,
- org.apache.hyracks.api.config.Section... sections) {
+ org.apache.hyracks.api.config.Section... sections) {
ArrayNode configArray = o.putArray("config");
for (org.apache.hyracks.api.config.Section section : cfg.getSections(Arrays.asList(sections)::contains)) {
ObjectNode sectionNode = configArray.addObject();
Map<String, Object> sectionConfig = getSectionOptionsForJSON(cfg, section, option -> true);
- sectionNode.put("section", section.sectionName())
- .putPOJO("properties", sectionConfig);
+ sectionNode.put("section", section.sectionName()).putPOJO("properties", sectionConfig);
}
}
public static Map<String, Object> getSectionOptionsForJSON(IApplicationConfig cfg,
- org.apache.hyracks.api.config.Section section,
- Predicate<IOption> selector) {
+ org.apache.hyracks.api.config.Section section, Predicate<IOption> selector) {
Map<String, Object> sectionConfig = new TreeMap<>();
for (IOption option : cfg.getOptions(section)) {
if (selector.test(option)) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
index 42ed1e7..3807a00 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
@@ -37,8 +37,7 @@
}
long result1 = StorageUtil.getByteValue(s);
if (result1 > Integer.MAX_VALUE || result1 < Integer.MIN_VALUE) {
- throw new IllegalArgumentException(
- "The given value: " + result1 + " is not within the int range.");
+ throw new IllegalArgumentException("The given value: " + result1 + " is not within the int range.");
}
return (int) result1;
}
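The hunk above keeps an explicit range check before narrowing a long byte count to int; since Java 8 the JDK offers the same guard as Math.toIntExact, which throws instead of silently truncating. A sketch (the parsed byte value is assumed):

    public final class NarrowDemo {
        public static void main(String[] args) {
            long bytes = 3L * 1024 * 1024; // e.g. a parsed "3MB"
            // Math.toIntExact throws ArithmeticException if the value overflows an int.
            int intBytes = Math.toIntExact(bytes);
            System.out.println(intBytes); // 3145728
        }
    }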
@@ -50,12 +49,12 @@
@Override
public String serializeToHumanReadable(Object value) {
- return value + " (" + StorageUtil.toHumanReadableSize((int)value) + ")";
+ return value + " (" + StorageUtil.toHumanReadableSize((int) value) + ")";
}
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (int)value);
+ node.put(fieldName, (int) value);
}
};
@@ -72,12 +71,12 @@
@Override
public String serializeToHumanReadable(Object value) {
- return value + " (" + StorageUtil.toHumanReadableSize((long)value) + ")";
+ return value + " (" + StorageUtil.toHumanReadableSize((long) value) + ")";
}
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (long)value);
+ node.put(fieldName, (long) value);
}
};
@@ -88,7 +87,7 @@
if (Integer.highestOneBit(value) > 16) {
throw new IllegalArgumentException("The given value " + s + " is too big for a short");
}
- return (short)value;
+ return (short) value;
}
@Override
@@ -98,7 +97,7 @@
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (short)value);
+ node.put(fieldName, (short) value);
}
};
@@ -115,7 +114,7 @@
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (int)value);
+ node.put(fieldName, (int) value);
}
};
@@ -132,7 +131,7 @@
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (double)value);
+ node.put(fieldName, (double) value);
}
};
@@ -149,7 +148,7 @@
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (String)value);
+ node.put(fieldName, (String) value);
}
};
@@ -166,7 +165,7 @@
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (long)value);
+ node.put(fieldName, (long) value);
}
};
@@ -183,7 +182,7 @@
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (boolean)value);
+ node.put(fieldName, (boolean) value);
}
};
@@ -207,12 +206,12 @@
@Override
public String serializeToJSON(Object value) {
- return value == null ? null : ((Level)value).name();
+ return value == null ? null : ((Level) value).name();
}
@Override
public String serializeToIni(Object value) {
- return ((Level)value).name();
+ return ((Level) value).name();
}
@Override
@@ -221,25 +220,25 @@
}
};
- public static final IOptionType<String []> STRING_ARRAY = new IOptionType<String []>() {
+ public static final IOptionType<String[]> STRING_ARRAY = new IOptionType<String[]>() {
@Override
- public String [] parse(String s) {
+ public String[] parse(String s) {
return s == null ? null : s.split("\\s*,\\s*");
}
@Override
- public Class<String []> targetType() {
- return String [].class;
+ public Class<String[]> targetType() {
+ return String[].class;
}
@Override
public String serializeToIni(Object value) {
- return String.join(",", (String [])value);
+ return String.join(",", (String[]) value);
}
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, value == null ? null : StringUtils.join((String [])value, ','));
+ node.put(fieldName, value == null ? null : StringUtils.join((String[]) value, ','));
}
};
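The STRING_ARRAY parser above splits on commas with optional surrounding whitespace, so the space around each comma is swallowed rather than kept in the entries. A quick sketch:

    import java.util.Arrays;

    public final class SplitDemo {
        public static void main(String[] args) {
            String s = "red, green ,blue";
            // "\\s*,\\s*" absorbs whitespace on either side of each comma.
            String[] parts = s == null ? null : s.split("\\s*,\\s*");
            System.out.println(Arrays.toString(parts)); // [red, green, blue]
        }
    }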
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java
index 85731b6..13e4504 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java
@@ -60,17 +60,14 @@
RESULT_TTL(LONG, 86400000L), // TODO(mblow): add time unit
RESULT_SWEEP_THRESHOLD(LONG, 60000L), // TODO(mblow): add time unit
@SuppressWarnings("RedundantCast") // not redundant- false positive from IDEA
- ROOT_DIR(STRING, (Function<IApplicationConfig, String>) appConfig ->
- FileUtil.joinPath(appConfig.getString(ControllerConfig.Option.DEFAULT_DIR),
- "ClusterControllerService"), "<value of " + ControllerConfig.Option.DEFAULT_DIR.cmdline() +
- ">/ClusterControllerService"),
+ ROOT_DIR(STRING, (Function<IApplicationConfig, String>) appConfig -> FileUtil.joinPath(appConfig.getString(ControllerConfig.Option.DEFAULT_DIR), "ClusterControllerService"), "<value of " + ControllerConfig.Option.DEFAULT_DIR.cmdline() + ">/ClusterControllerService"),
CLUSTER_TOPOLOGY(STRING),
JOB_QUEUE_CLASS(STRING, "org.apache.hyracks.control.cc.scheduler.FIFOJobQueue"),
JOB_QUEUE_CAPACITY(INTEGER, 4096),
JOB_MANAGER_CLASS(STRING, "org.apache.hyracks.control.cc.job.JobManager"),
ENFORCE_FRAME_WRITER_PROTOCOL(BOOLEAN, false),
CORES_MULTIPLIER(INTEGER, 3),
- CONTROLLER_ID(SHORT, (short)0x0000);
+ CONTROLLER_ID(SHORT, (short) 0x0000);
private final IOptionType parser;
private Object defaultValue;
@@ -93,7 +90,7 @@
}
<T> Option(IOptionType<T> parser, Function<IApplicationConfig, T> defaultValue,
- String defaultValueDescription) {
+ String defaultValueDescription) {
this.parser = parser;
this.defaultValue = defaultValue;
this.defaultValueDescription = defaultValueDescription;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ControllerConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ControllerConfig.java
index 19c89e0..8ecd312 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ControllerConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ControllerConfig.java
@@ -33,6 +33,7 @@
public class ControllerConfig implements Serializable {
private static final long serialVersionUID = 1L;
+
public enum Option implements IOption {
CONFIG_FILE(OptionTypes.STRING, "Specify path to master configuration file", null),
CONFIG_FILE_URL(OptionTypes.URL, "Specify URL to master configuration file", null),
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java
index 95c063f..519bafc 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java
@@ -50,7 +50,7 @@
NCSERVICE_PORT(INTEGER, 9090),
CLUSTER_ADDRESS(STRING, (String) null),
CLUSTER_PORT(INTEGER, 1099),
- CLUSTER_CONTROLLER_ID(SHORT, (short)0x0000),
+ CLUSTER_CONTROLLER_ID(SHORT, (short) 0x0000),
CLUSTER_PUBLIC_ADDRESS(STRING, PUBLIC_ADDRESS),
CLUSTER_PUBLIC_PORT(INTEGER, CLUSTER_LISTEN_PORT),
NODE_ID(STRING, (String) null),
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NodeRegistration.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NodeRegistration.java
index 89d6e78..75ef0b7 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NodeRegistration.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NodeRegistration.java
@@ -75,11 +75,10 @@
private final long maxJobId;
public NodeRegistration(InetSocketAddress ncAddress, String nodeId, NCConfig ncConfig, NetworkAddress dataPort,
- NetworkAddress datasetPort, String osName, String arch, String osVersion, int nProcessors,
- String vmName, String vmVersion, String vmVendor, String classpath, String libraryPath,
- String bootClasspath, List<String> inputArguments, Map<String, String> systemProperties,
- HeartbeatSchema hbSchema, NetworkAddress messagingPort, NodeCapacity capacity, int pid,
- long maxJobId) {
+ NetworkAddress datasetPort, String osName, String arch, String osVersion, int nProcessors, String vmName,
+ String vmVersion, String vmVendor, String classpath, String libraryPath, String bootClasspath,
+ List<String> inputArguments, Map<String, String> systemProperties, HeartbeatSchema hbSchema,
+ NetworkAddress messagingPort, NodeCapacity capacity, int pid, long maxJobId) {
this.ncAddress = ncAddress;
this.nodeId = nodeId;
this.ncConfig = ncConfig;
@@ -184,7 +183,9 @@
return messagingPort;
}
- public int getPid() { return pid; }
+ public int getPid() {
+ return pid;
+ }
public long getMaxJobId() {
return maxJobId;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ServiceConstants.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ServiceConstants.java
index 1b790b7..4a44356 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ServiceConstants.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ServiceConstants.java
@@ -23,5 +23,6 @@
START_NC,
TERMINATE
}
+
public static final String NC_SERVICE_MAGIC_COOKIE = "hyncmagic2";
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/deployment/DeploymentUtils.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/deployment/DeploymentUtils.java
index bb65f7f..4d8c137 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/deployment/DeploymentUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/deployment/DeploymentUtils.java
@@ -119,8 +119,8 @@
throws HyracksException {
try {
IJobSerializerDeserializerContainer jobSerDeContainer = serviceCtx.getJobSerializerDeserializerContainer();
- IJobSerializerDeserializer jobSerDe = deploymentId == null ? null
- : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
+ IJobSerializerDeserializer jobSerDe =
+ deploymentId == null ? null : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
return jobSerDe == null ? JavaSerializationUtils.deserialize(bytes) : jobSerDe.deserialize(bytes);
} catch (Exception e) {
throw new HyracksException(e);
@@ -140,8 +140,8 @@
throws HyracksException {
try {
IJobSerializerDeserializerContainer jobSerDeContainer = serviceCtx.getJobSerializerDeserializerContainer();
- IJobSerializerDeserializer jobSerDe = deploymentId == null ? null
- : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
+ IJobSerializerDeserializer jobSerDe =
+ deploymentId == null ? null : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
return jobSerDe == null ? JavaSerializationUtils.loadClass(className) : jobSerDe.loadClass(className);
} catch (ClassNotFoundException | IOException e) {
throw new HyracksException(e);
@@ -159,8 +159,8 @@
public static ClassLoader getClassLoader(DeploymentId deploymentId, IServiceContext appCtx)
throws HyracksException {
IJobSerializerDeserializerContainer jobSerDeContainer = appCtx.getJobSerializerDeserializerContainer();
- IJobSerializerDeserializer jobSerDe = deploymentId == null ? null
- : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
+ IJobSerializerDeserializer jobSerDe =
+ deploymentId == null ? null : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
return jobSerDe == null ? DeploymentUtils.class.getClassLoader() : jobSerDe.getClassLoader();
}
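DeploymentUtils repeats one shape three times in the hunks above: resolve a per-deployment serializer when a deployment id is present, otherwise fall back to the default implementation. A generic sketch of that null-guarded fallback (types, names, and values are hypothetical):

    import java.util.Map;

    public final class FallbackDemo {
        // Returns the per-key handler when key is non-null and registered, else the default.
        static String resolve(Map<String, String> registry, String key, String fallback) {
            String handler = key == null ? null : registry.get(key);
            return handler == null ? fallback : handler;
        }

        public static void main(String[] args) {
            System.out.println(resolve(Map.of("d1", "custom"), "d1", "default")); // custom
            System.out.println(resolve(Map.of(), null, "default")); // default
        }
    }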
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
index e4e2dbe..0fdafe3 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
@@ -80,16 +80,14 @@
@Override
public void notifyTaskComplete(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics)
throws Exception {
- NotifyTaskCompleteFunction fn = new NotifyTaskCompleteFunction(jobId, taskId,
- nodeId, statistics);
+ NotifyTaskCompleteFunction fn = new NotifyTaskCompleteFunction(jobId, taskId, nodeId, statistics);
ipcHandle.send(-1, fn, null);
}
@Override
public void notifyTaskFailure(JobId jobId, TaskAttemptId taskId, String nodeId, List<Exception> exceptions)
throws Exception {
- NotifyTaskFailureFunction fn = new NotifyTaskFailureFunction(jobId, taskId, nodeId,
- exceptions);
+ NotifyTaskFailureFunction fn = new NotifyTaskFailureFunction(jobId, taskId, nodeId, exceptions);
ipcHandle.send(-1, fn, null);
}
@@ -101,8 +99,7 @@
@Override
public void notifyDeployBinary(DeploymentId deploymentId, String nodeId, DeploymentStatus status) throws Exception {
- NotifyDeployBinaryFunction fn = new NotifyDeployBinaryFunction(deploymentId, nodeId,
- status);
+ NotifyDeployBinaryFunction fn = new NotifyDeployBinaryFunction(deploymentId, nodeId, status);
ipcHandle.send(-1, fn, null);
}
@@ -120,37 +117,34 @@
@Override
public void registerPartitionProvider(PartitionDescriptor partitionDescriptor) throws Exception {
- RegisterPartitionProviderFunction fn = new RegisterPartitionProviderFunction(
- partitionDescriptor);
+ RegisterPartitionProviderFunction fn = new RegisterPartitionProviderFunction(partitionDescriptor);
ipcHandle.send(-1, fn, null);
}
@Override
public void registerPartitionRequest(PartitionRequest partitionRequest) throws Exception {
- RegisterPartitionRequestFunction fn = new RegisterPartitionRequestFunction(
- partitionRequest);
+ RegisterPartitionRequestFunction fn = new RegisterPartitionRequestFunction(partitionRequest);
ipcHandle.send(-1, fn, null);
}
@Override
public void sendApplicationMessageToCC(byte[] data, DeploymentId deploymentId, String nodeId) throws Exception {
- SendApplicationMessageFunction fn = new SendApplicationMessageFunction(data,
- deploymentId, nodeId);
+ SendApplicationMessageFunction fn = new SendApplicationMessageFunction(data, deploymentId, nodeId);
ipcHandle.send(-1, fn, null);
}
@Override
public void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult,
boolean emptyResult, int partition, int nPartitions, NetworkAddress networkAddress) throws Exception {
- RegisterResultPartitionLocationFunction fn = new RegisterResultPartitionLocationFunction(
- jobId, rsId, orderedResult, emptyResult, partition, nPartitions, networkAddress);
+ RegisterResultPartitionLocationFunction fn = new RegisterResultPartitionLocationFunction(jobId, rsId,
+ orderedResult, emptyResult, partition, nPartitions, networkAddress);
ipcHandle.send(-1, fn, null);
}
@Override
public void reportResultPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition) throws Exception {
- ReportResultPartitionWriteCompletionFunction fn = new ReportResultPartitionWriteCompletionFunction(
- jobId, rsId, partition);
+ ReportResultPartitionWriteCompletionFunction fn =
+ new ReportResultPartitionWriteCompletionFunction(jobId, rsId, partition);
ipcHandle.send(-1, fn, null);
}
@@ -167,8 +161,7 @@
@Override
public void notifyStateDump(String nodeId, String stateDumpId, String state) throws Exception {
- StateDumpResponseFunction fn = new StateDumpResponseFunction(nodeId, stateDumpId,
- state);
+ StateDumpResponseFunction fn = new StateDumpResponseFunction(nodeId, stateDumpId, state);
ipcHandle.send(-1, fn, null);
}
@@ -180,8 +173,7 @@
@Override
public void notifyThreadDump(String nodeId, String requestId, String threadDumpJSON) throws Exception {
- ThreadDumpResponseFunction tdrf = new ThreadDumpResponseFunction(nodeId, requestId,
- threadDumpJSON);
+ ThreadDumpResponseFunction tdrf = new ThreadDumpResponseFunction(nodeId, requestId, threadDumpJSON);
ipcHandle.send(-1, tdrf, null);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
index a09a8bc..429cb26 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
@@ -65,8 +65,8 @@
List<TaskAttemptDescriptor> taskDescriptors, Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies,
Set<JobFlag> flags, Map<byte[], byte[]> jobParameters, DeployedJobSpecId deployedJobSpecId)
throws Exception {
- StartTasksFunction stf = new StartTasksFunction(deploymentId, jobId, planBytes,
- taskDescriptors, connectorPolicies, flags, jobParameters, deployedJobSpecId);
+ StartTasksFunction stf = new StartTasksFunction(deploymentId, jobId, planBytes, taskDescriptors,
+ connectorPolicies, flags, jobParameters, deployedJobSpecId);
ipcHandle.send(-1, stf, null);
}
@@ -84,8 +84,7 @@
@Override
public void reportPartitionAvailability(PartitionId pid, NetworkAddress networkAddress) throws Exception {
- ReportPartitionAvailabilityFunction rpaf = new ReportPartitionAvailabilityFunction(
- pid, networkAddress);
+ ReportPartitionAvailabilityFunction rpaf = new ReportPartitionAvailabilityFunction(pid, networkAddress);
ipcHandle.send(-1, rpaf, null);
}
@@ -127,8 +126,7 @@
@Override
public void sendApplicationMessageToNC(byte[] data, DeploymentId deploymentId, String nodeId) throws Exception {
- SendApplicationMessageFunction fn = new SendApplicationMessageFunction(data,
- deploymentId, nodeId);
+ SendApplicationMessageFunction fn = new SendApplicationMessageFunction(data, deploymentId, nodeId);
ipcHandle.send(-1, fn, null);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
index 90dfc8c..bd98200 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
@@ -45,7 +45,7 @@
return counters;
}
- public abstract ObjectNode toJSON() ;
+ public abstract ObjectNode toJSON();
protected void populateCounters(ObjectNode jo) {
ObjectMapper om = new ObjectMapper();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
index 64d074b..c4eff85 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
@@ -62,7 +62,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
index 5bdb1b5..687874c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
@@ -62,7 +62,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/TaskProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/TaskProfile.java
index 3b54887..f977654 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/TaskProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/TaskProfile.java
@@ -70,7 +70,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/shutdown/ShutdownRun.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/shutdown/ShutdownRun.java
index eae2eb6..e210963 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/shutdown/ShutdownRun.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/shutdown/ShutdownRun.java
@@ -24,7 +24,7 @@
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
-public class ShutdownRun implements IShutdownStatusConditionVariable{
+public class ShutdownRun implements IShutdownStatusConditionVariable {
private final Set<String> shutdownNodeIds = new TreeSet<>();
private boolean shutdownSuccess = false;
@@ -60,7 +60,7 @@
return shutdownSuccess;
}
- public synchronized Set<String> getRemainingNodes(){
+ public synchronized Set<String> getRemainingNodes() {
return shutdownNodeIds;
}
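The ShutdownRun hunks are pure whitespace normalization: the template inserts a single space before every opening brace. A compilable sketch of the resulting style (hypothetical class, not from this repository):

import java.util.Set;
import java.util.TreeSet;

public class BraceSpacingExample { // was "...Example{" before normalization
    private final Set<String> nodeIds = new TreeSet<>();

    public synchronized Set<String> remaining() { // was "remaining(){"
        return nodeIds;
    }

    public static void main(String[] args) {
        System.out.println(new BraceSpacingExample().remaining());
    }
}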
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java
index dbcba99..8ed7c9e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java
@@ -116,9 +116,8 @@
break;
}
if (DEBUG) {
- LOGGER.log(Level.TRACE,
- "Dequeue (" + WorkQueue.this.hashCode() + "): " + dequeueCount.incrementAndGet() + "/"
- + enqueueCount);
+ LOGGER.log(Level.TRACE, "Dequeue (" + WorkQueue.this.hashCode() + "): "
+ + dequeueCount.incrementAndGet() + "/" + enqueueCount);
}
if (LOGGER.isEnabled(r.logLevel())) {
LOGGER.log(r.logLevel(), "Executing: " + r);
@@ -141,8 +140,7 @@
if (waitedDelta > 0 || blockedDelta > 0) {
LOGGER.warn("Work " + r + " waited " + waitedDelta + " times (~"
+ (after.getWaitedTime() - before.getWaitedTime()) + "ms), blocked " + blockedDelta
- + " times (~" + (after.getBlockedTime() - before.getBlockedTime()) + "ms)"
- );
+ + " times (~" + (after.getBlockedTime() - before.getBlockedTime()) + "ms)");
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
index 24d72f8..0e74a4c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
@@ -302,8 +302,8 @@
messagingNetManager.start();
}
- final InetSocketAddress ccAddress = new InetSocketAddress(ncConfig.getClusterAddress(),
- ncConfig.getClusterPort());
+ final InetSocketAddress ccAddress =
+ new InetSocketAddress(ncConfig.getClusterAddress(), ncConfig.getClusterPort());
this.primaryCcs = addCc(ncConfig.getClusterControllerId(), ccAddress);
workQueue.start();
@@ -390,8 +390,8 @@
NetworkAddress messagingAddress =
messagingNetManager != null ? messagingNetManager.getPublicNetworkAddress() : null;
int allCores = osMXBean.getAvailableProcessors();
- nodeRegistration = new NodeRegistration(ncAddress, id, ncConfig, netAddress, datasetAddress,
- osMXBean.getName(), osMXBean.getArch(), osMXBean.getVersion(), allCores, runtimeMXBean.getVmName(),
+ nodeRegistration = new NodeRegistration(ncAddress, id, ncConfig, netAddress, datasetAddress, osMXBean.getName(),
+ osMXBean.getArch(), osMXBean.getVersion(), allCores, runtimeMXBean.getVmName(),
runtimeMXBean.getVmVersion(), runtimeMXBean.getVmVendor(), runtimeMXBean.getClassPath(),
runtimeMXBean.getLibraryPath(), runtimeMXBean.getBootClassPath(), runtimeMXBean.getInputArguments(),
runtimeMXBean.getSystemProperties(), hbSchema, messagingAddress, application.getCapacity(),
@@ -403,8 +403,8 @@
// Start heartbeat generator.
if (!heartbeatThreads.containsKey(ccs)) {
- Thread heartbeatThread = new Thread(new HeartbeatTask(ccs, nodeParameters.getHeartbeatPeriod()),
- id + "-Heartbeat");
+ Thread heartbeatThread =
+ new Thread(new HeartbeatTask(ccs, nodeParameters.getHeartbeatPeriod()), id + "-Heartbeat");
heartbeatThread.setPriority(Thread.MAX_PRIORITY);
heartbeatThread.setDaemon(true);
heartbeatThread.start();
@@ -529,7 +529,6 @@
return jpbs;
}
-
public void storeActivityClusterGraph(DeployedJobSpecId deployedJobSpecId, ActivityClusterGraph acg)
throws HyracksException {
if (deployedJobSpecActivityClusterGraphMap.get(deployedJobSpecId.getId()) != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
index 07bb504..6e5a58e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
@@ -460,6 +460,7 @@
public byte[] getJobParameter(byte[] name, int start, int length) throws HyracksException {
return ncs.createOrGetJobParameterByteStore(joblet.getJobId()).getParameterValue(name, start, length);
}
+
public Set<JobFlag> getJobFlags() {
return jobFlags;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java
index 24edeb2..8c4fcb0 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java
@@ -89,9 +89,8 @@
}
private long read(long offset, ByteBuffer buffer) throws HyracksDataException {
- return datasetMemoryManager != null ?
- resultState.read(datasetMemoryManager, offset, buffer) :
- resultState.read(offset, buffer);
+ return datasetMemoryManager != null ? resultState.read(datasetMemoryManager, offset, buffer)
+ : resultState.read(offset, buffer);
}
private void close() {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultState.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultState.java
index afce266..43e3409 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultState.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultState.java
@@ -214,7 +214,7 @@
initReadFileHandle();
}
readSize = ioManager.syncRead(readFileHandle, offset, buffer);
- if (readSize < 0){
+ if (readSize < 0) {
throw new HyracksDataException("Premature end of file");
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/profiling/IOCounterDefault.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/profiling/IOCounterDefault.java
index 5380911..1f8669d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/profiling/IOCounterDefault.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/profiling/IOCounterDefault.java
@@ -19,7 +19,7 @@
package org.apache.hyracks.control.nc.io.profiling;
-public class IOCounterDefault implements IIOCounter{
+public class IOCounterDefault implements IIOCounter {
@Override
public long getReads() {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
index 4787a50..54ac5e5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
@@ -97,8 +97,8 @@
}
if (!failed) {
manager.registerPartition(pid, ctx.getJobletContext().getJobId().getCcId(), taId,
- new MaterializedPartition(ctx, fRef, executor, ctx.getIoManager()),
- PartitionState.COMMITTED, taId.getAttempt() == 0 ? false : true);
+ new MaterializedPartition(ctx, fRef, executor, ctx.getIoManager()), PartitionState.COMMITTED,
+ taId.getAttempt() == 0 ? false : true);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
index 147606d..a782bca 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
@@ -99,8 +99,8 @@
fRefCopy = fRef;
}
writer.open();
- IFileHandle readHandle = fRefCopy == null ? null :
- ioManager.open(fRefCopy, IIOManager.FileReadWriteMode.READ_ONLY,
+ IFileHandle readHandle = fRefCopy == null ? null
+ : ioManager.open(fRefCopy, IIOManager.FileReadWriteMode.READ_ONLY,
IIOManager.FileSyncMode.METADATA_ASYNC_DATA_ASYNC);
try {
if (readHandle == null) {
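Both the DatasetPartitionReader and MaterializingPipelinedPartition hunks apply the same ternary rule: a long conditional expression breaks before the operator, so ":" (and, where needed, "?") leads the continuation line instead of dangling at the end of the previous one. A sketch under that assumption, with hypothetical names:

public class TernaryWrapExample {
    static long read(boolean managed, long managedBytes, long directBytes) {
        return managed ? managedBytes // first branch stays with the condition
                : directBytes; // ":" opens the continuation line, template style
    }

    public static void main(String[] args) {
        System.out.println(read(true, 1024L, 0L));
    }
}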
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/PartitionManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/PartitionManager.java
index bb69eec..9ee4a9e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/PartitionManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/PartitionManager.java
@@ -138,8 +138,7 @@
}
public void updatePartitionState(CcId ccId, PartitionId pid, TaskAttemptId taId, IPartition partition,
- PartitionState state)
- throws HyracksDataException {
+ PartitionState state) throws HyracksDataException {
PartitionDescriptor desc = new PartitionDescriptor(pid, ncs.getId(), taId, partition.isReusable());
desc.setState(state);
try {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/profiling/ConnectorReceiverProfilingFrameReader.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/profiling/ConnectorReceiverProfilingFrameReader.java
index 45e1236..0dc1fb6 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/profiling/ConnectorReceiverProfilingFrameReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/profiling/ConnectorReceiverProfilingFrameReader.java
@@ -36,8 +36,8 @@
this.reader = reader;
this.openCounter = ctx.getCounterContext().getCounter(cdId + ".receiver." + receiverIndex + ".open", true);
this.closeCounter = ctx.getCounterContext().getCounter(cdId + ".receiver." + receiverIndex + ".close", true);
- this.frameCounter = ctx.getCounterContext()
- .getCounter(cdId + ".receiver." + receiverIndex + ".nextFrame", true);
+ this.frameCounter =
+ ctx.getCounterContext().getCounter(cdId + ".receiver." + receiverIndex + ".nextFrame", true);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/resources/memory/FrameManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/resources/memory/FrameManager.java
index 4dc6bc9..38ae95b 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/resources/memory/FrameManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/resources/memory/FrameManager.java
@@ -51,8 +51,7 @@
}
if (bytes > FrameConstants.MAX_FRAMESIZE) {
throw new HyracksDataException(
- "Unable to allocate frame larger than:" + FrameConstants.MAX_FRAMESIZE
- + " bytes");
+ "Unable to allocate frame larger than:" + FrameConstants.MAX_FRAMESIZE + " bytes");
}
ByteBuffer buffer = ByteBuffer.allocate(bytes);
FrameHelper.serializeFrameSize(buffer, bytes / minFrameSize);
@@ -67,8 +66,8 @@
return allocateFrame(newSizeInBytes);
} else {
if (newSizeInBytes > FrameConstants.MAX_FRAMESIZE) {
- throw new HyracksDataException("Unable to allocate frame of size bigger than: "
- + FrameConstants.MAX_FRAMESIZE + " bytes");
+ throw new HyracksDataException(
+ "Unable to allocate frame of size bigger than: " + FrameConstants.MAX_FRAMESIZE + " bytes");
}
ByteBuffer buffer = allocateFrame(newSizeInBytes);
int limit = Math.min(newSizeInBytes, tobeDeallocate.capacity());
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/DeployBinaryWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/DeployBinaryWork.java
index d1385ec..dfda463 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/DeployBinaryWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/DeployBinaryWork.java
@@ -54,8 +54,8 @@
public void run() {
DeploymentStatus status;
try {
- DeploymentUtils.deploy(deploymentId, binaryURLs, ncs.getContext()
- .getJobSerializerDeserializerContainer(), ncs.getServerContext(), true);
+ DeploymentUtils.deploy(deploymentId, binaryURLs, ncs.getContext().getJobSerializerDeserializerContainer(),
+ ncs.getServerContext(), true);
status = DeploymentStatus.SUCCEED;
} catch (Exception e) {
status = DeploymentStatus.FAIL;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java
index 3871302..cfd69ce 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java
@@ -50,9 +50,13 @@
Map<JobId, Joblet> jobletMap = ncs.getJobletMap();
Joblet ji = jobletMap.get(pid.getJobId());
if (ji != null) {
- PartitionChannel channel = new PartitionChannel(pid, new NetworkInputChannel(ncs.getNetworkManager(),
- new InetSocketAddress(InetAddress.getByAddress(networkAddress.lookupIpAddress()),
- networkAddress.getPort()), pid, 5));
+ PartitionChannel channel =
+ new PartitionChannel(pid,
+ new NetworkInputChannel(ncs.getNetworkManager(),
+ new InetSocketAddress(
+ InetAddress.getByAddress(networkAddress.lookupIpAddress()),
+ networkAddress.getPort()),
+ pid, 5));
ji.reportPartitionAvailability(channel);
}
} catch (Exception e) {
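The PartitionChannel rewrite above is the clearest example of how the template handles deeply nested constructor calls: each call wraps independently, and every continuation line is indented relative to the call it belongs to, so the nesting depth stays readable. A self-contained sketch (standard-library types only; the property names are invented):

import java.net.InetSocketAddress;

public class NestedWrapExample {
    public static void main(String[] args) {
        // Each nested call gets its own wrap point and its own indentation level.
        InetSocketAddress address =
                new InetSocketAddress(
                        System.getProperty("cc.host", "localhost"),
                        Integer.getInteger("cc.port", 1099));
        System.out.println(address);
    }
}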
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCServiceConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCServiceConfig.java
index 10fa679..32f6bf3 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCServiceConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCServiceConfig.java
@@ -35,20 +35,16 @@
* If an option is specified both in the config file and on the command line, the config file
* version will take precedence.
*/
- @Option(name = "-config-file", required = false,
- usage = "Local NC configuration file (default: none)")
+ @Option(name = "-config-file", required = false, usage = "Local NC configuration file (default: none)")
public String configFile = null;
- @Option(name = "-address", required = false,
- usage = "Address to listen on for connections from CC (default: all addresses)")
+ @Option(name = "-address", required = false, usage = "Address to listen on for connections from CC (default: all addresses)")
public String address = null;
- @Option(name = "-port", required = false,
- usage = "Port to listen on for connections from CC (default: 9090)")
+ @Option(name = "-port", required = false, usage = "Port to listen on for connections from CC (default: 9090)")
public int port = 9090;
- @Option(name = "-logdir", required = false,
- usage = "Directory to log NC output ('-' for stdout of NC service; default: $app.home/logs)")
+ @Option(name = "-logdir", required = false, usage = "Directory to log NC output ('-' for stdout of NC service; default: $app.home/logs)")
public String logdir = null;
private Ini ini = null;
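Note that two of the joined @Option lines above run past 120 columns: the formatter evidently refuses to split string literals, so an annotation with a long usage string stays on one over-long line rather than being wrapped mid-string. A self-contained sketch; the @Option type below is a stand-in for the args4j annotation NCServiceConfig actually uses, declared locally so the example compiles without that dependency:

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

public class AnnotationJoinExample {
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.FIELD)
    @interface Option { // stand-in for org.kohsuke.args4j.Option
        String name();
        boolean required() default false;
        String usage() default "";
    }

    // Joined onto one line by the template; if the usage string were longer, the line
    // would simply exceed the column limit, because string literals are never split.
    @Option(name = "-example-port", required = false, usage = "Port to listen on (default: 9090)")
    public int port = 9090;

    public static void main(String[] args) throws Exception {
        Option opt = AnnotationJoinExample.class.getField("port").getAnnotation(Option.class);
        System.out.println(opt.name() + ": " + opt.usage());
    }
}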
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/ByteArrayPointable.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/ByteArrayPointable.java
index 2ce5291..98700a3 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/ByteArrayPointable.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/ByteArrayPointable.java
@@ -88,8 +88,8 @@
int thisArrayStart = this.getContentStartOffset();
int thatArrayStart = thatStart + getNumberBytesToStoreMeta(thatArrayLen);
- for (int thisIndex = 0, thatIndex = 0;
- thisIndex < thisArrayLen && thatIndex < thatArrayLen; ++thisIndex, ++thatIndex) {
+ for (int thisIndex = 0, thatIndex = 0; thisIndex < thisArrayLen
+ && thatIndex < thatArrayLen; ++thisIndex, ++thatIndex) {
if (this.bytes[thisArrayStart + thisIndex] != thatBytes[thatArrayStart + thatIndex]) {
return (0xff & this.bytes[thisArrayStart + thisIndex]) - (0xff & thatBytes[thatArrayStart + thatIndex]);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/UTF8StringPointable.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/UTF8StringPointable.java
index 0850b04..86a6f9c 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/UTF8StringPointable.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/UTF8StringPointable.java
@@ -512,8 +512,8 @@
boolean isLetter = Character.isLetter(originalChar);
// Make the first character into upper case while the later ones into lower case.
- char resultChar = toUpperCase && isLetter ? Character.toUpperCase(originalChar) : (isLetter ? Character
- .toLowerCase(originalChar) : originalChar);
+ char resultChar = toUpperCase && isLetter ? Character.toUpperCase(originalChar)
+ : (isLetter ? Character.toLowerCase(originalChar) : originalChar);
builder.appendChar(resultChar);
byteIndex += src.charSize(srcStart + byteIndex);
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/BinaryHashSet.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/BinaryHashSet.java
index 1996b4e..c5b8e8a 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/BinaryHashSet.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/BinaryHashSet.java
@@ -119,7 +119,6 @@
return putFindInternal(key, false, keyArray, increaseFoundCount);
}
-
// Put an entry or find an entry
private int putFindInternal(BinaryEntry key, boolean isInsert, byte[] keyArray, boolean increaseFoundCount)
throws HyracksDataException {
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/UTF8StringBuilder.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/UTF8StringBuilder.java
index fe04146..2300c06 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/UTF8StringBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/UTF8StringBuilder.java
@@ -33,7 +33,8 @@
}
}
- public void appendUtf8StringPointable(UTF8StringPointable src, int byteStartOffset, int byteLength) throws IOException {
+ public void appendUtf8StringPointable(UTF8StringPointable src, int byteStartOffset, int byteLength)
+ throws IOException {
out.write(src.getByteArray(), byteStartOffset, byteLength);
}
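The UTF8StringBuilder hunk shows the signature-wrapping rule: when a declaration exceeds the limit, the break lands before "throws", and the continuation is indented a double unit so it cannot be confused with the method body. A compilable sketch with hypothetical names:

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class ThrowsWrapExample {
    private final ByteArrayOutputStream out = new ByteArrayOutputStream();

    // Wrapped before "throws", as the template would do if the signature ran long.
    public void appendBytes(byte[] src, int byteStartOffset, int byteLength)
            throws IOException {
        out.write(src, byteStartOffset, byteLength);
    }

    public static void main(String[] args) throws IOException {
        ThrowsWrapExample ex = new ThrowsWrapExample();
        ex.appendBytes("hello".getBytes(), 0, 5);
        System.out.println(ex.out.size()); // prints 5
    }
}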
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/ByteArrayPointableTest.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/ByteArrayPointableTest.java
index fbc7aea..f7146df 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/ByteArrayPointableTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/ByteArrayPointableTest.java
@@ -27,8 +27,8 @@
@Test
public void testCompareTo() throws Exception {
- ByteArrayPointable byteArrayPointable = ByteArrayPointable
- .generatePointableFromPureBytes(new byte[] { 1, 2, 3, 4 });
+ ByteArrayPointable byteArrayPointable =
+ ByteArrayPointable.generatePointableFromPureBytes(new byte[] { 1, 2, 3, 4 });
testEqual(byteArrayPointable, ByteArrayPointable.generatePointableFromPureBytes(new byte[] { 1, 2, 3, 4 }));
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/UTF8StringPointableTest.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/UTF8StringPointableTest.java
index 93b2290..302e7a0 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/UTF8StringPointableTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/UTF8StringPointableTest.java
@@ -34,8 +34,8 @@
public class UTF8StringPointableTest {
public static UTF8StringPointable STRING_EMPTY = generateUTF8Pointable(UTF8StringSample.EMPTY_STRING);
public static UTF8StringPointable STRING_UTF8_MIX = generateUTF8Pointable(UTF8StringSample.STRING_UTF8_MIX);
- public static UTF8StringPointable STRING_UTF8_MIX_LOWERCASE = generateUTF8Pointable(
- UTF8StringSample.STRING_UTF8_MIX_LOWERCASE);
+ public static UTF8StringPointable STRING_UTF8_MIX_LOWERCASE =
+ generateUTF8Pointable(UTF8StringSample.STRING_UTF8_MIX_LOWERCASE);
public static UTF8StringPointable STRING_LEN_127 = generateUTF8Pointable(UTF8StringSample.STRING_LEN_127);
public static UTF8StringPointable STRING_LEN_128 = generateUTF8Pointable(UTF8StringSample.STRING_LEN_128);
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/util/UTF8StringCharacterIteratorTest.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/util/UTF8StringCharacterIteratorTest.java
index fc8a6ab..c001ab3 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/util/UTF8StringCharacterIteratorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/util/UTF8StringCharacterIteratorTest.java
@@ -49,7 +49,7 @@
}
@Test
- public void testIterator(){
+ public void testIterator() {
testEachIterator(UTF8StringSample.EMPTY_STRING);
testEachIterator(UTF8StringSample.STRING_UTF8_MIX);
testEachIterator(UTF8StringSample.STRING_LEN_128);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameFixedFieldAppender.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameFixedFieldAppender.java
index 8765bf1..5dc1fa3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameFixedFieldAppender.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameFixedFieldAppender.java
@@ -76,9 +76,9 @@
IntSerDeUtils.putInt(array, tupleDataEndOffset + currentField * 4, lastFieldEndOffset);
if (++currentField == fieldCount) {
tupleDataEndOffset += fieldCount * 4 + lastFieldEndOffset;
- IntSerDeUtils
- .putInt(array, FrameHelper.getTupleCountOffset(frame.getFrameSize()) - 4 * (tupleCount + 1),
- tupleDataEndOffset);
+ IntSerDeUtils.putInt(array,
+ FrameHelper.getTupleCountOffset(frame.getFrameSize()) - 4 * (tupleCount + 1),
+ tupleDataEndOffset);
++tupleCount;
IntSerDeUtils.putInt(array, FrameHelper.getTupleCountOffset(frame.getFrameSize()), tupleCount);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameTupleAppenderAccessor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameTupleAppenderAccessor.java
index b464f8e..4a324ff 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameTupleAppenderAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameTupleAppenderAccessor.java
@@ -47,9 +47,8 @@
@Override
public int getTupleStartOffset(int tupleIndex) {
- int offset = tupleIndex == 0 ?
- FrameConstants.TUPLE_START_OFFSET :
- IntSerDeUtils.getInt(getBuffer().array(), tupleCountOffset - 4 * tupleIndex);
+ int offset = tupleIndex == 0 ? FrameConstants.TUPLE_START_OFFSET
+ : IntSerDeUtils.getInt(getBuffer().array(), tupleCountOffset - 4 * tupleIndex);
return offset;
}
@@ -65,8 +64,8 @@
@Override
public int getFieldStartOffset(int tupleIndex, int fIdx) {
- return fIdx == 0 ? 0 : IntSerDeUtils.getInt(getBuffer().array(),
- getTupleStartOffset(tupleIndex) + (fIdx - 1) * 4);
+ return fIdx == 0 ? 0
+ : IntSerDeUtils.getInt(getBuffer().array(), getTupleStartOffset(tupleIndex) + (fIdx - 1) * 4);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
index ee5a041..dc66d19 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
@@ -55,8 +55,8 @@
IBinaryHashFunction hashFn = hashFunctions[j];
int fStart = accessor.getFieldStartOffset(tIndex, fIdx);
int fEnd = accessor.getFieldEndOffset(tIndex, fIdx);
- int fh = hashFn
- .hash(accessor.getBuffer().array(), startOffset + slotLength + fStart, fEnd - fStart);
+ int fh = hashFn.hash(accessor.getBuffer().array(), startOffset + slotLength + fStart,
+ fEnd - fStart);
h = h * 31 + fh;
}
if (h < 0) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
index 458171c..4385bd5 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
@@ -30,7 +30,8 @@
private final int[] hashFields;
private final IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories;
- public FieldHashPartitionComputerFamily(int[] hashFields, IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories) {
+ public FieldHashPartitionComputerFamily(int[] hashFields,
+ IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories) {
this.hashFields = hashFields;
this.hashFunctionGeneratorFactories = hashFunctionGeneratorFactories;
}
@@ -52,8 +53,8 @@
IBinaryHashFunction hashFn = hashFunctions[j];
int fStart = accessor.getFieldStartOffset(tIndex, fIdx);
int fEnd = accessor.getFieldEndOffset(tIndex, fIdx);
- int fh = hashFn
- .hash(accessor.getBuffer().array(), startOffset + slotLength + fStart, fEnd - fStart);
+ int fh = hashFn.hash(accessor.getBuffer().array(), startOffset + slotLength + fStart,
+ fEnd - fStart);
h += fh;
}
if (h < 0) {
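Both partition-computer hunks move the wrap point out of the member access: the receiver and ".hash" stay together, and the break falls inside the argument list instead. A sketch of the same call shape, using a hypothetical Hasher interface in place of IBinaryHashFunction:

public class CallWrapExample {
    interface Hasher { // hypothetical stand-in for IBinaryHashFunction
        int hash(byte[] bytes, int offset, int length);
    }

    public static void main(String[] args) {
        Hasher hashFn = (bytes, offset, length) -> {
            int h = 0;
            for (int i = offset; i < offset + length; i++) {
                h = h * 31 + (bytes[i] & 0xff);
            }
            return h;
        };
        byte[] data = "example".getBytes();
        // Template style: no break before ".hash"; the argument list wraps instead.
        int fh = hashFn.hash(data, 0,
                data.length);
        System.out.println(fh);
    }
}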
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java
index f4da9bf..d58a248 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java
@@ -86,8 +86,8 @@
int fIdx = rangeFields[f];
int fStart = accessor.getFieldStartOffset(tIndex, fIdx);
int fEnd = accessor.getFieldEndOffset(tIndex, fIdx);
- c = comparators[f].compare(accessor.getBuffer().array(), startOffset + slotLength + fStart, fEnd
- - fStart, rangeMap.getByteArray(fieldIndex, f), rangeMap.getStartOffset(fieldIndex, f),
+ c = comparators[f].compare(accessor.getBuffer().array(), startOffset + slotLength + fStart,
+ fEnd - fStart, rangeMap.getByteArray(fieldIndex, f), rangeMap.getStartOffset(fieldIndex, f),
rangeMap.getLength(fieldIndex, f));
if (c != 0) {
return c;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java
index 81f06da..e99a2ff 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java
@@ -112,7 +112,8 @@
return f.createBinaryComparator();
}
- public static IBinaryComparatorFactory[] serdesToComparatorFactories(ISerializerDeserializer[] serdes, int numSerdes) {
+ public static IBinaryComparatorFactory[] serdesToComparatorFactories(ISerializerDeserializer[] serdes,
+ int numSerdes) {
IBinaryComparatorFactory[] comparatorsFactories = new IBinaryComparatorFactory[numSerdes];
for (int i = 0; i < numSerdes; i++) {
comparatorsFactories[i] = serdeToComparatorFactory(serdes[i]);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java
index 52bf893..08ed922 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java
@@ -67,7 +67,7 @@
}
public static void createIntegerTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple, boolean filtered,
- final int... fields) throws HyracksDataException {
+ final int... fields) throws HyracksDataException {
DataOutput dos = tupleBuilder.getDataOutput();
tupleBuilder.reset();
for (final int i : fields) {
@@ -88,8 +88,8 @@
public static ITupleReference createIntegerTuple(boolean filtered, final int... fields)
throws HyracksDataException {
- ArrayTupleBuilder tupleBuilder = filtered ? new ArrayTupleBuilder(fields.length + 1)
- : new ArrayTupleBuilder(fields.length);
+ ArrayTupleBuilder tupleBuilder =
+ filtered ? new ArrayTupleBuilder(fields.length + 1) : new ArrayTupleBuilder(fields.length);
ArrayTupleReference tuple = new ArrayTupleReference();
createIntegerTuple(tupleBuilder, tuple, fields);
return tuple;
@@ -122,8 +122,8 @@
StringBuilder strBuilder = new StringBuilder();
int numPrintFields = Math.min(tuple.getFieldCount(), fields.length);
for (int i = 0; i < numPrintFields; i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
DataInput dataIn = new DataInputStream(inStream);
Object o = fields[i].deserialize(dataIn);
strBuilder.append(o.toString());
@@ -139,8 +139,8 @@
int numFields = Math.min(tuple.getFieldCount(), fields.length);
Object[] objs = new Object[numFields];
for (int i = 0; i < numFields; i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
DataInput dataIn = new DataInputStream(inStream);
objs[i] = fields[i].deserialize(dataIn);
}
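The two ByteArrayInputStream hunks above illustrate the template's preferred wrap point for long assignments: break immediately after "=" whenever the entire right-hand side then fits on the continuation line, rather than splitting the argument list. A runnable sketch (hypothetical helper, standard-library types):

import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.IOException;

public class AssignmentWrapExample {
    public static int readFirstInt(byte[] field, int start, int length) throws IOException {
        // The break lands after "=", leaving the constructor call intact on one line.
        ByteArrayInputStream inStream =
                new ByteArrayInputStream(field, start, length);
        DataInput dataIn = new DataInputStream(inStream);
        return dataIn.readInt();
    }

    public static void main(String[] args) throws IOException {
        byte[] bytes = { 0, 0, 0, 42 };
        System.out.println(readFirstInt(bytes, 0, bytes.length)); // prints 42
    }
}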
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java
index 766c77a..c39648c 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java
@@ -155,8 +155,8 @@
private IFrameTupleAccessor prepareData(DATA_TYPE type) throws HyracksDataException {
IFrameTupleAccessor accessor = new FrameTupleAccessor(recordDescriptor);
- IFrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(new FrameManager(INPUT_BUFFER_SIZE)),
- true);
+ IFrameTupleAppender appender =
+ new FrameTupleAppender(new VSizeFrame(new FrameManager(INPUT_BUFFER_SIZE)), true);
int i = 0;
do {
switch (type) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
index e338961..c6512929 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
@@ -92,7 +92,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode jop = om.createObjectNode();
jop.put("id", String.valueOf(getOperatorId()));
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
index 93a8120..6ae5b28 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
@@ -22,8 +22,8 @@
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-public abstract class AbstractUnaryInputOperatorNodePushable extends AbstractOperatorNodePushable implements
- IFrameWriter {
+public abstract class AbstractUnaryInputOperatorNodePushable extends AbstractOperatorNodePushable
+ implements IFrameWriter {
protected IFrameWriter writer;
protected RecordDescriptor recordDesc;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java
index 4499e32c..47b11ce 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java
@@ -64,7 +64,7 @@
private ByteBuffer mergeExistingFrames(int frameSize) throws HyracksDataException {
int mergedSize = memBudget - allocated;
- for (Iterator<ByteBuffer> iter = buffers.iterator(); iter.hasNext(); ) {
+ for (Iterator<ByteBuffer> iter = buffers.iterator(); iter.hasNext();) {
ByteBuffer buffer = iter.next();
iter.remove();
mergedSize += buffer.capacity();
@@ -88,7 +88,7 @@
}
private ByteBuffer findExistingFrame(int frameSize) {
- for (Iterator<ByteBuffer> iter = buffers.iterator(); iter.hasNext(); ) {
+ for (Iterator<ByteBuffer> iter = buffers.iterator(); iter.hasNext();) {
ByteBuffer next = iter.next();
if (next.capacity() >= frameSize) {
iter.remove();
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java
index 700500b..6a60813 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java
@@ -59,4 +59,3 @@
}
}
-
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
index 4359b54..27e2671 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
@@ -65,8 +65,8 @@
}
List<IFrameReader> batch = new ArrayList<IFrameReader>(nSenders);
pbm.getNextBatch(batch, nSenders);
- merger = new RunMergingFrameReader(ctx, batch, inFrames, sortFields,
- comparators, nmkComputer, recordDescriptor);
+ merger = new RunMergingFrameReader(ctx, batch, inFrames, sortFields, comparators, nmkComputer,
+ recordDescriptor);
} else {
// multi level merge.
throw new HyracksDataException("Not yet supported");
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
index b1cd83e..920fdb8 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
@@ -82,8 +82,8 @@
expectedPartitions.set(i);
}
}
- NonDeterministicChannelReader channelReader = new NonDeterministicChannelReader(nProducerPartitions,
- expectedPartitions);
+ NonDeterministicChannelReader channelReader =
+ new NonDeterministicChannelReader(nProducerPartitions, expectedPartitions);
NonDeterministicFrameReader frameReader = new NonDeterministicFrameReader(channelReader);
return new PartitionCollector(ctx, getConnectorId(), receiverIndex, expectedPartitions, frameReader,
channelReader);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
index d26b9ef..02fbedb 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
@@ -54,8 +54,8 @@
int nProducerPartitions, int nConsumerPartitions) throws HyracksDataException {
BitSet expectedPartitions = new BitSet(nProducerPartitions);
expectedPartitions.set(0, nProducerPartitions);
- NonDeterministicChannelReader channelReader = new NonDeterministicChannelReader(nProducerPartitions,
- expectedPartitions);
+ NonDeterministicChannelReader channelReader =
+ new NonDeterministicChannelReader(nProducerPartitions, expectedPartitions);
NonDeterministicFrameReader frameReader = new NonDeterministicFrameReader(channelReader);
return new PartitionCollector(ctx, getConnectorId(), index, expectedPartitions, frameReader, channelReader);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
index edcad42..026ca5e 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
@@ -69,8 +69,8 @@
public IFrameWriter createPartitioner(IHyracksTaskContext ctx, RecordDescriptor recordDesc,
IPartitionWriterFactory edwFactory, int index, int nProducerPartitions, int nConsumerPartitions)
throws HyracksDataException {
- final PartitionDataWriter hashWriter = new PartitionDataWriter(ctx, nConsumerPartitions, edwFactory, recordDesc,
- tpcf.createPartitioner());
+ final PartitionDataWriter hashWriter =
+ new PartitionDataWriter(ctx, nConsumerPartitions, edwFactory, recordDesc, tpcf.createPartitioner());
return hashWriter;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
index eda353b..78428a3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
@@ -57,8 +57,8 @@
int nProducerPartitions, int nConsumerPartitions) throws HyracksDataException {
BitSet expectedPartitions = new BitSet(nProducerPartitions);
expectedPartitions.set(index);
- NonDeterministicChannelReader channelReader = new NonDeterministicChannelReader(nProducerPartitions,
- expectedPartitions);
+ NonDeterministicChannelReader channelReader =
+ new NonDeterministicChannelReader(nProducerPartitions, expectedPartitions);
NonDeterministicFrameReader frameReader = new NonDeterministicFrameReader(channelReader);
return new PartitionCollector(ctx, getConnectorId(), index, expectedPartitions, frameReader, channelReader);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java
index 97d5f2b..e1df709 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java
@@ -30,7 +30,7 @@
public PartitionWithMessageDataWriter(IHyracksTaskContext ctx, int consumerPartitionCount,
IPartitionWriterFactory pwFactory, RecordDescriptor recordDescriptor, ITuplePartitionComputer tpc)
- throws HyracksDataException {
+ throws HyracksDataException {
super(ctx, consumerPartitionCount, pwFactory, recordDescriptor, tpc);
// since the message partition writer sends broadcast messages, we allocate frames when we create the writer
for (int i = 0; i < consumerPartitionCount; ++i) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
index 543ad40..18dc9c9 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
@@ -95,7 +95,7 @@
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
- return new DeserializedOperatorNodePushable(ctx, new FileWriteOperator(ctx.getIoManager(),
- partition), recordDescProvider.getInputRecordDescriptor(getActivityId(), 0));
+ return new DeserializedOperatorNodePushable(ctx, new FileWriteOperator(ctx.getIoManager(), partition),
+ recordDescProvider.getInputRecordDescriptor(getActivityId(), 0));
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
index d121ec4..175bdae 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
@@ -67,8 +67,8 @@
ArrayTupleBuilder tb = new ArrayTupleBuilder(valueParsers.length);
DataOutput dos = tb.getDataOutput();
- FieldCursorForDelimitedDataParser cursor = new FieldCursorForDelimitedDataParser(
- new InputStreamReader(in), fieldDelimiter, quote);
+ FieldCursorForDelimitedDataParser cursor =
+ new FieldCursorForDelimitedDataParser(new InputStreamReader(in), fieldDelimiter, quote);
while (cursor.nextRecord()) {
tb.reset();
for (int i = 0; i < valueParsers.length; ++i) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
index 40f02f9..3232527 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
@@ -84,17 +84,18 @@
intermediateResultKeys[i] = i;
}
- final FrameTuplePairComparator ftpcInputCompareToAggregate = new FrameTuplePairComparator(keyFields,
- intermediateResultKeys, comparators);
+ final FrameTuplePairComparator ftpcInputCompareToAggregate =
+ new FrameTuplePairComparator(keyFields, intermediateResultKeys, comparators);
- final ITuplePartitionComputer tpc = new FieldHashPartitionComputerFamily(keyFields, hashFunctionFamilies)
- .createPartitioner(seed);
+ final ITuplePartitionComputer tpc =
+ new FieldHashPartitionComputerFamily(keyFields, hashFunctionFamilies).createPartitioner(seed);
// For calculating hash value for the already aggregated tuples (not incoming tuples)
// This computer is required to calculate the hash value of a aggregated tuple
// while doing the garbage collection work on Hash Table.
- final ITuplePartitionComputer tpcIntermediate = new FieldHashPartitionComputerFamily(intermediateResultKeys,
- hashFunctionFamilies).createPartitioner(seed);
+ final ITuplePartitionComputer tpcIntermediate =
+ new FieldHashPartitionComputerFamily(intermediateResultKeys, hashFunctionFamilies)
+ .createPartitioner(seed);
final IAggregatorDescriptor aggregator = aggregateFactory.createAggregator(ctx, inRecordDescriptor,
outRecordDescriptor, keyFields, intermediateResultKeys, null, -1);
@@ -110,9 +111,8 @@
final int numPartitions = getNumOfPartitions(inputDataBytesSize / ctx.getInitialFrameSize(), memoryBudget);
final int entriesPerPartition = (int) Math.ceil(1.0 * tableSize / numPartitions);
if (LOGGER.isDebugEnabled()) {
- LOGGER.debug(
- "created hashtable, table size:" + tableSize + " file size:" + inputDataBytesSize + " #partitions:"
- + numPartitions);
+ LOGGER.debug("created hashtable, table size:" + tableSize + " file size:" + inputDataBytesSize
+ + " #partitions:" + numPartitions);
}
final ArrayTupleBuilder outputTupleBuilder = new ArrayTupleBuilder(outRecordDescriptor.getFields().length);
@@ -122,14 +122,14 @@
private final TuplePointer pointer = new TuplePointer();
private final BitSet spilledSet = new BitSet(numPartitions);
// This frame pool will be shared by both data table and hash table.
- private final IDeallocatableFramePool framePool = new DeallocatableFramePool(ctx,
- framesLimit * ctx.getInitialFrameSize());
+ private final IDeallocatableFramePool framePool =
+ new DeallocatableFramePool(ctx, framesLimit * ctx.getInitialFrameSize());
// buffer manager for hash table
- private final ISimpleFrameBufferManager bufferManagerForHashTable = new FramePoolBackedFrameBufferManager(
- framePool);
+ private final ISimpleFrameBufferManager bufferManagerForHashTable =
+ new FramePoolBackedFrameBufferManager(framePool);
- private final ISerializableTable hashTableForTuplePointer = new SerializableHashTable(tableSize, ctx,
- bufferManagerForHashTable);
+ private final ISerializableTable hashTableForTuplePointer =
+ new SerializableHashTable(tableSize, ctx, bufferManagerForHashTable);
// buffer manager for data table
final IPartitionedTupleBufferManager bufferManager = new VPartitionTupleBufferManager(
@@ -138,8 +138,8 @@
final ITuplePointerAccessor bufferAccessor = bufferManager.getTuplePointerAccessor(outRecordDescriptor);
- private final PreferToSpillFullyOccupiedFramePolicy spillPolicy = new PreferToSpillFullyOccupiedFramePolicy(
- bufferManager, spilledSet);
+ private final PreferToSpillFullyOccupiedFramePolicy spillPolicy =
+ new PreferToSpillFullyOccupiedFramePolicy(bufferManager, spilledSet);
private final FrameTupleAppender outputAppender = new FrameTupleAppender(new VSizeFrame(ctx));
@@ -157,8 +157,8 @@
// Checks whether the garbage collection is required and conducts a garbage collection if so.
if (hashTableForTuplePointer.isGarbageCollectionNeeded()) {
- int numberOfFramesReclaimed = hashTableForTuplePointer.collectGarbage(bufferAccessor,
- tpcIntermediate);
+ int numberOfFramesReclaimed =
+ hashTableForTuplePointer.collectGarbage(bufferAccessor, tpcIntermediate);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Garbage Collection on Hash table is done. Deallocated frames:"
+ numberOfFramesReclaimed);
@@ -310,8 +310,8 @@
// partition again and again.
return 2;
}
- long numberOfPartitions = (long) (Math
- .ceil((nubmerOfInputFrames * FUDGE_FACTOR - frameLimit) / (frameLimit - 1)));
+ long numberOfPartitions =
+ (long) (Math.ceil((nubmerOfInputFrames * FUDGE_FACTOR - frameLimit) / (frameLimit - 1)));
numberOfPartitions = Math.max(2, numberOfPartitions);
if (numberOfPartitions > frameLimit) {
numberOfPartitions = (long) Math.ceil(Math.sqrt(nubmerOfInputFrames * FUDGE_FACTOR));
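For context on the expression being re-wrapped at the end of this hunk: it sizes the number of spill partitions from the estimated input frames and the frame budget, clamps the result to at least 2, and falls back to a square-root estimate when the count would exceed the budget. An illustrative re-implementation with made-up constants (FUDGE_FACTOR's real value lives in HashSpillableTableFactory and is not shown in this diff):

public class PartitionCountExample {
    private static final double FUDGE_FACTOR = 1.2; // illustrative value only

    static long numberOfPartitions(long numberOfInputFrames, int frameLimit) {
        long partitions = (long) Math.ceil((numberOfInputFrames * FUDGE_FACTOR - frameLimit) / (frameLimit - 1));
        partitions = Math.max(2, partitions);
        if (partitions > frameLimit) {
            partitions = (long) Math.ceil(Math.sqrt(numberOfInputFrames * FUDGE_FACTOR));
        }
        return partitions;
    }

    public static void main(String[] args) {
        // 1000 input frames with a 64-frame budget: ceil((1200 - 64) / 63) = 19 partitions.
        System.out.println(numberOfPartitions(1000, 64));
    }
}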
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
index 7acd687..cb32c4a 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
@@ -113,7 +113,8 @@
int count = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
count += 1;
if (!useObjectState) {
try {
@@ -139,7 +140,8 @@
int sum = 0, count = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
count += 1;
if (!useObjectState) {
ByteBuffer buf = ByteBuffer.wrap(data);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
index b77c91c..290cc58 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
@@ -118,7 +118,8 @@
int sum = 0, count = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
count += 1;
if (!useObjectState) {
ByteBuffer buf = ByteBuffer.wrap(data);
@@ -156,8 +157,10 @@
int count = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
- count += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart + 4);
+ sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ count += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart + 4);
if (!useObjectState) {
try {
fieldOutput.writeInt(sum);
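
Every call site rewrapped in these two aggregators computes a field's absolute byte position the same way: tuple start offset, plus the field-slot table (one length slot per field), plus the field's relative start. The merge aggregator additionally reads the running count four bytes past the sum, because partial AVG state is serialized as a packed (int sum, int count) pair. A sketch of the layout arithmetic:

    final class FieldOffsetSketch {
        // Absolute byte offset of a field within the frame's underlying array.
        static int fieldOffset(int tupleStartOffset, int fieldSlotsLength, int fieldStartOffset) {
            return tupleStartOffset + fieldSlotsLength + fieldStartOffset;
        }

        // The count of the (int sum, int count) partial state sits Integer.BYTES
        // past the sum, hence the "+ 4" in the merge aggregator above.
        static int countOffset(int tupleStartOffset, int fieldSlotsLength, int fieldStartOffset) {
            return fieldOffset(tupleStartOffset, fieldSlotsLength, fieldStartOffset) + Integer.BYTES;
        }
    }
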
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
index 771303f..fc8d956 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
@@ -35,8 +35,7 @@
/**
*
*/
-public class FloatSumFieldAggregatorFactory implements
- IFieldAggregateDescriptorFactory {
+public class FloatSumFieldAggregatorFactory implements IFieldAggregateDescriptorFactory {
private static final long serialVersionUID = 1L;
@@ -44,7 +43,7 @@
private final boolean useObjectState;
- public FloatSumFieldAggregatorFactory(int aggField, boolean useObjState){
+ public FloatSumFieldAggregatorFactory(int aggField, boolean useObjState) {
this.aggField = aggField;
this.useObjectState = useObjState;
}
@@ -53,8 +52,7 @@
* @see org.apache.hyracks.dataflow.std.group.IFieldAggregateDescriptorFactory#createAggregator(org.apache.hyracks.api.context.IHyracksTaskContext, org.apache.hyracks.api.dataflow.value.RecordDescriptor, org.apache.hyracks.api.dataflow.value.RecordDescriptor)
*/
@Override
- public IFieldAggregateDescriptor createAggregator(IHyracksTaskContext ctx,
- RecordDescriptor inRecordDescriptor,
+ public IFieldAggregateDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDescriptor,
RecordDescriptor outRecordDescriptor) throws HyracksDataException {
return new IFieldAggregateDescriptor() {
@@ -64,8 +62,8 @@
}
@Override
- public void outputPartialResult(DataOutput fieldOutput, byte[] data,
- int offset, AggregateState state) throws HyracksDataException {
+ public void outputPartialResult(DataOutput fieldOutput, byte[] data, int offset, AggregateState state)
+ throws HyracksDataException {
float sum;
if (!useObjectState) {
sum = FloatPointable.getFloat(data, offset);
@@ -80,8 +78,8 @@
}
@Override
- public void outputFinalResult(DataOutput fieldOutput, byte[] data,
- int offset, AggregateState state) throws HyracksDataException {
+ public void outputFinalResult(DataOutput fieldOutput, byte[] data, int offset, AggregateState state)
+ throws HyracksDataException {
float sum;
if (!useObjectState) {
sum = FloatPointable.getFloat(data, offset);
@@ -106,14 +104,14 @@
}
@Override
- public void init(IFrameTupleAccessor accessor, int tIndex,
- DataOutput fieldOutput, AggregateState state)
+ public void init(IFrameTupleAccessor accessor, int tIndex, DataOutput fieldOutput, AggregateState state)
throws HyracksDataException {
float sum = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += FloatPointable.getFloat(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += FloatPointable.getFloat(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
if (!useObjectState) {
try {
@@ -138,13 +136,13 @@
}
@Override
- public void aggregate(IFrameTupleAccessor accessor, int tIndex,
- byte[] data, int offset, AggregateState state)
- throws HyracksDataException {
+ public void aggregate(IFrameTupleAccessor accessor, int tIndex, byte[] data, int offset,
+ AggregateState state) throws HyracksDataException {
float sum = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += FloatPointable.getFloat(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += FloatPointable.getFloat(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
if (!useObjectState) {
ByteBuffer buf = ByteBuffer.wrap(data);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
index 511b651..90e1474 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
@@ -107,7 +107,8 @@
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
if (!useObjectState) {
try {
@@ -143,7 +144,8 @@
int sum = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
if (!useObjectState) {
ByteBuffer buf = ByteBuffer.wrap(data);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
index 6900918..d43d4fd 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
@@ -113,9 +113,9 @@
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
int fieldLength = accessor.getFieldLength(tIndex, aggField);
- String strField = utf8SerializerDeserializer.deserialize(new DataInputStream(
- new ByteArrayInputStream(accessor.getBuffer().array(), tupleOffset
- + accessor.getFieldSlotsLength() + fieldStart, fieldLength)));
+ String strField = utf8SerializerDeserializer
+ .deserialize(new DataInputStream(new ByteArrayInputStream(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart, fieldLength)));
if (hasBinaryState) {
// Object-binary-state
Object[] storedState;
@@ -158,9 +158,9 @@
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
int fieldLength = accessor.getFieldLength(tIndex, aggField);
- String strField = utf8SerializerDeserializer.deserialize(new DataInputStream(
- new ByteArrayInputStream(accessor.getBuffer().array(), tupleOffset
- + accessor.getFieldSlotsLength() + fieldStart, fieldLength)));
+ String strField = utf8SerializerDeserializer
+ .deserialize(new DataInputStream(new ByteArrayInputStream(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart, fieldLength)));
if (hasBinaryState) {
int stateIdx = IntegerPointable.getInteger(data, offset);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
index 595e2c4..d2aa35f 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
@@ -95,8 +95,8 @@
}
@Override
- public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor, int tIndex,
- AggregateState state) throws HyracksDataException {
+ public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor,
+ int tIndex, AggregateState state) throws HyracksDataException {
DataOutput dos = tupleBuilder.getDataOutput();
int tupleOffset = stateAccessor.getTupleStartOffset(tIndex);
@@ -151,8 +151,8 @@
int fieldIndex = 0;
for (int i = 0; i < aggregators.length; i++) {
if (aggregators[i].needsBinaryState()) {
- int stateFieldOffset = stateAccessor.getFieldStartOffset(stateTupleIndex, keys.length
- + fieldIndex);
+ int stateFieldOffset =
+ stateAccessor.getFieldStartOffset(stateTupleIndex, keys.length + fieldIndex);
aggregators[i].aggregate(accessor, tIndex, stateAccessor.getBuffer().array(),
stateTupleOffset + stateAccessor.getFieldSlotsLength() + stateFieldOffset,
((AggregateState[]) state.state)[i]);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
index 7e6e147..43f57af 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
@@ -71,8 +71,8 @@
for (int i = 0; i < comparatorFactories.length; ++i) {
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
- this.firstNormalizerComputer = firstNormalizerFactory == null ? null
- : firstNormalizerFactory.createNormalizedKeyComputer();
+ this.firstNormalizerComputer =
+ firstNormalizerFactory == null ? null : firstNormalizerFactory.createNormalizedKeyComputer();
this.spillableTableFactory = spillableTableFactory;
this.inRecordDescriptor = inRecordDescriptor;
this.outRecordDescriptor = outRecordDescriptor;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
index 23dee02..6dea186 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
@@ -124,7 +124,7 @@
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new ExternalGroupBuildOperatorNodePushable(ctx, new TaskId(getActivityId(), partition), tableSize,
fileSize, keyFields, framesLimit, comparatorFactories, firstNormalizerFactory,
partialAggregatorFactory, recordDescProvider.getInputRecordDescriptor(getActivityId(), 0),
@@ -142,7 +142,7 @@
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new ExternalGroupWriteOperatorNodePushable(ctx,
new TaskId(new ActivityId(getOperatorId(), AGGREGATE_ACTIVITY_ID), partition),
spillableTableFactory, partialRecDesc, outRecDesc, framesLimit, keyFields, firstNormalizerFactory,
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
index fb88775..95994f3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
@@ -130,8 +130,8 @@
runs[i].getFileSize(), mergeGroupFields, groupByComparators, nmkComputer,
mergeAggregatorFactory, partialAggRecordDesc, outRecordDesc, frameLimit, level);
RunFileWriter[] runFileWriters = new RunFileWriter[partitionTable.getNumPartitions()];
- int[] sizeInTuplesNextLevel = buildGroup(runs[i].createDeleteOnCloseReader(), partitionTable,
- runFileWriters);
+ int[] sizeInTuplesNextLevel =
+ buildGroup(runs[i].createDeleteOnCloseReader(), partitionTable, runFileWriters);
for (int idFile = 0; idFile < runFileWriters.length; idFile++) {
if (runFileWriters[idFile] != null) {
generatedRuns.add(runFileWriters[idFile]);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java
index d29e9ab..8e7777f 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java
@@ -63,8 +63,7 @@
}
}
- private void flushPartitionToRun(int partition, RunFileWriter writer)
- throws HyracksDataException {
+ private void flushPartitionToRun(int partition, RunFileWriter writer) throws HyracksDataException {
try {
spilledNumTuples[partition] += table.flushFrames(partition, writer, AggregateType.PARTIAL);
table.clear(partition);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
index db6102e..ca78046 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
@@ -82,9 +82,8 @@
// Deducts input/output frames.
this.memoryLimit = framesLimit <= 0 ? -1 : ((long) (framesLimit - 2)) * ctx.getInitialFrameSize();
- this.aggregator =
- aggregatorFactory.createAggregator(ctx, inRecordDesc, outRecordDesc, groupFields, groupFields, writer,
- this.memoryLimit);
+ this.aggregator = aggregatorFactory.createAggregator(ctx, inRecordDesc, outRecordDesc, groupFields, groupFields,
+ writer, this.memoryLimit);
this.aggregateState = aggregator.createAggregateStates();
copyFrame = new VSizeFrame(ctx);
inFrameAccessor = new FrameTupleAccessor(inRecordDesc);
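
The memory limit above reflects the frame-budget convention these operators share: one frame is reserved for input and one for output, so the aggregator receives (framesLimit - 2) frames' worth of bytes, and a non-positive framesLimit disables the cap. A sketch:

    final class MemoryBudgetSketch {
        /** Aggregator budget in bytes, or -1 when framesLimit imposes no cap. */
        static long aggregatorMemoryLimit(int framesLimit, int frameSize) {
            // One frame each is reserved for the input and output sides.
            return framesLimit <= 0 ? -1 : ((long) (framesLimit - 2)) * frameSize;
        }
    }
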
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java
index 10cc954..ec652eb 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java
@@ -197,11 +197,10 @@
this.allProjectFields = projectedFields;
this.firstKeyNormalizerComputer =
firstKeyNormalizerFactory != null ? firstKeyNormalizerFactory.createNormalizedKeyComputer() : null;
- this.normalizedKeyDecisive =
- firstKeyNormalizerFactory != null
- ? firstKeyNormalizerFactory.getNormalizedKeyProperties().isDecisive()
- && compareFields[0].length == 1
- : false;
+ this.normalizedKeyDecisive = firstKeyNormalizerFactory != null
+ ? firstKeyNormalizerFactory.getNormalizedKeyProperties().isDecisive()
+ && compareFields[0].length == 1
+ : false;
comparators = new IBinaryComparator[compareFields[0].length];
for (int i = 0; i < comparators.length; i++) {
comparators[i] = comparatorFactory[i].createBinaryComparator();
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
index 665bb2b..bee0590 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
@@ -176,22 +176,22 @@
for (int i = 0; i < comparatorFactories.length; ++i) {
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
- final IMissingWriter[] nullWriters1 = isLeftOuter ? new IMissingWriter[nonMatchWriterFactories1.length]
- : null;
+ final IMissingWriter[] nullWriters1 =
+ isLeftOuter ? new IMissingWriter[nonMatchWriterFactories1.length] : null;
if (isLeftOuter) {
for (int i = 0; i < nonMatchWriterFactories1.length; i++) {
nullWriters1[i] = nonMatchWriterFactories1[i].createMissingWriter();
}
}
- final IPredicateEvaluator predEvaluator = (predEvaluatorFactory == null ? null
- : predEvaluatorFactory.createPredicateEvaluator());
+ final IPredicateEvaluator predEvaluator =
+ (predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator());
IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
private BuildAndPartitionTaskState state = new BuildAndPartitionTaskState(
ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition));
private final FrameTupleAccessor accessorBuild = new FrameTupleAccessor(rd1);
- private final ITuplePartitionComputer hpcBuild = new FieldHashPartitionComputerFactory(keys1,
- hashFunctionFactories).createPartitioner();
+ private final ITuplePartitionComputer hpcBuild =
+ new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories).createPartitioner();
private final FrameTupleAppender appender = new FrameTupleAppender();
private final FrameTupleAppender ftappender = new FrameTupleAppender();
private IFrame[] bufferForPartitions;
@@ -285,8 +285,8 @@
if (memsize > inputsize0) {
state.nPartitions = 0;
} else {
- state.nPartitions = (int) (Math
- .ceil((inputsize0 * factor / nPartitions - memsize) / (memsize - 1)));
+ state.nPartitions =
+ (int) (Math.ceil((inputsize0 * factor / nPartitions - memsize) / (memsize - 1)));
}
if (state.nPartitions <= 0) {
// becomes in-memory HJ
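
The branch above sizes the hybrid hash join: if the build side fits in memory, no spill partitions are needed; otherwise the count is chosen so each spilled partition, scaled by the fudge factor and split across nPartitions tasks, can later be joined within memsize frames. The formula isolated as a sketch:

    final class HybridSizingSketch {
        static int spillPartitionCount(int memsize, double inputsize0, double factor, int nPartitions) {
            if (memsize > inputsize0) {
                return 0; // the whole build side fits: plain in-memory hash join
            }
            return (int) Math.ceil((inputsize0 * factor / nPartitions - memsize) / (memsize - 1));
        }
    }
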
@@ -303,10 +303,10 @@
throw new HyracksDataException("not enough memory");
}
- ITuplePartitionComputer hpc0 = new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories)
- .createPartitioner();
- ITuplePartitionComputer hpc1 = new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories)
- .createPartitioner();
+ ITuplePartitionComputer hpc0 =
+ new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories).createPartitioner();
+ ITuplePartitionComputer hpc1 =
+ new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories).createPartitioner();
int tableSize = (int) (state.memoryForHashtable * recordsPerFrame * factor);
ISerializableTable table = new SimpleSerializableHashTable(tableSize, ctx);
state.joiner =
@@ -369,23 +369,23 @@
for (int i = 0; i < comparatorFactories.length; ++i) {
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
- final IMissingWriter[] nullWriters1 = isLeftOuter ? new IMissingWriter[nonMatchWriterFactories1.length]
- : null;
+ final IMissingWriter[] nullWriters1 =
+ isLeftOuter ? new IMissingWriter[nonMatchWriterFactories1.length] : null;
if (isLeftOuter) {
for (int i = 0; i < nonMatchWriterFactories1.length; i++) {
nullWriters1[i] = nonMatchWriterFactories1[i].createMissingWriter();
}
}
- final IPredicateEvaluator predEvaluator = (predEvaluatorFactory == null ? null
- : predEvaluatorFactory.createPredicateEvaluator());
+ final IPredicateEvaluator predEvaluator =
+ (predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator());
IOperatorNodePushable op = new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
private BuildAndPartitionTaskState state;
private final FrameTupleAccessor accessorProbe = new FrameTupleAccessor(rd0);
- private final ITuplePartitionComputerFactory hpcf0 = new FieldHashPartitionComputerFactory(keys0,
- hashFunctionFactories);
- private final ITuplePartitionComputerFactory hpcf1 = new FieldHashPartitionComputerFactory(keys1,
- hashFunctionFactories);
+ private final ITuplePartitionComputerFactory hpcf0 =
+ new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories);
+ private final ITuplePartitionComputerFactory hpcf1 =
+ new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories);
private final ITuplePartitionComputer hpcProbe = hpcf0.createPartitioner();
private final FrameTupleAppender appender = new FrameTupleAppender();
@@ -476,10 +476,10 @@
} finally {
state.joiner.releaseMemory();
}
- ITuplePartitionComputer hpcRep0 = new RepartitionComputerFactory(state.nPartitions, hpcf0)
- .createPartitioner();
- ITuplePartitionComputer hpcRep1 = new RepartitionComputerFactory(state.nPartitions, hpcf1)
- .createPartitioner();
+ ITuplePartitionComputer hpcRep0 =
+ new RepartitionComputerFactory(state.nPartitions, hpcf0).createPartitioner();
+ ITuplePartitionComputer hpcRep1 =
+ new RepartitionComputerFactory(state.nPartitions, hpcf1).createPartitioner();
if (state.memoryForHashtable != memsize - 2) {
for (int i = 0; i < state.nPartitions; i++) {
ByteBuffer buf = bufferForPartitions[i].getBuffer();
@@ -505,11 +505,10 @@
continue;
}
table.reset();
- InMemoryHashJoin joiner =
- new InMemoryHashJoin(ctx, new FrameTupleAccessor(rd0), hpcRep0,
- new FrameTupleAccessor(rd1), rd1, hpcRep1,
- new FrameTuplePairComparator(keys0, keys1, comparators), isLeftOuter,
- nullWriters1, table, predEvaluator, null);
+ InMemoryHashJoin joiner = new InMemoryHashJoin(ctx, new FrameTupleAccessor(rd0),
+ hpcRep0, new FrameTupleAccessor(rd1), rd1, hpcRep1,
+ new FrameTuplePairComparator(keys0, keys1, comparators), isLeftOuter,
+ nullWriters1, table, predEvaluator, null);
if (buildWriter != null) {
RunFileReader buildReader = buildWriter.createDeleteOnCloseReader();
@@ -559,8 +558,8 @@
private void write(int i, ByteBuffer head) throws HyracksDataException {
RunFileWriter writer = probeWriters[i];
if (writer == null) {
- FileReference file = ctx
- .createManagedWorkspaceFile(PartitionAndJoinActivityNode.class.getSimpleName());
+ FileReference file =
+ ctx.createManagedWorkspaceFile(PartitionAndJoinActivityNode.class.getSimpleName());
writer = new RunFileWriter(file, ctx.getIoManager());
writer.open();
probeWriters[i] = writer;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
index a51b780..10c6227 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
@@ -66,22 +66,20 @@
private static final Logger LOGGER = LogManager.getLogger();
- public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe,
- ITuplePartitionComputer tpcProbe, FrameTupleAccessor accessorBuild, RecordDescriptor rDBuild,
- ITuplePartitionComputer tpcBuild, FrameTuplePairComparator comparator, boolean isLeftOuter,
- IMissingWriter[] missingWritersBuild, ISerializableTable table, IPredicateEvaluator predEval,
- ISimpleFrameBufferManager bufferManager)
+ public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe, ITuplePartitionComputer tpcProbe,
+ FrameTupleAccessor accessorBuild, RecordDescriptor rDBuild, ITuplePartitionComputer tpcBuild,
+ FrameTuplePairComparator comparator, boolean isLeftOuter, IMissingWriter[] missingWritersBuild,
+ ISerializableTable table, IPredicateEvaluator predEval, ISimpleFrameBufferManager bufferManager)
throws HyracksDataException {
this(ctx, accessorProbe, tpcProbe, accessorBuild, rDBuild, tpcBuild, comparator, isLeftOuter,
missingWritersBuild, table, predEval, false, bufferManager);
}
- public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe,
- ITuplePartitionComputer tpcProbe, FrameTupleAccessor accessorBuild,
- RecordDescriptor rDBuild, ITuplePartitionComputer tpcBuild, FrameTuplePairComparator comparator,
- boolean isLeftOuter, IMissingWriter[] missingWritersBuild, ISerializableTable table,
- IPredicateEvaluator predEval, boolean reverse, ISimpleFrameBufferManager bufferManager)
- throws HyracksDataException {
+ public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe, ITuplePartitionComputer tpcProbe,
+ FrameTupleAccessor accessorBuild, RecordDescriptor rDBuild, ITuplePartitionComputer tpcBuild,
+ FrameTuplePairComparator comparator, boolean isLeftOuter, IMissingWriter[] missingWritersBuild,
+ ISerializableTable table, IPredicateEvaluator predEval, boolean reverse,
+ ISimpleFrameBufferManager bufferManager) throws HyracksDataException {
this.table = table;
storedTuplePointer = new TuplePointer();
buffers = new ArrayList<>();
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
index d81d955..3873bae 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
@@ -162,15 +162,15 @@
for (int i = 0; i < comparatorFactories.length; ++i) {
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
- final IMissingWriter[] nullWriters1 = isLeftOuter ? new IMissingWriter[nonMatchWriterFactories.length]
- : null;
+ final IMissingWriter[] nullWriters1 =
+ isLeftOuter ? new IMissingWriter[nonMatchWriterFactories.length] : null;
if (isLeftOuter) {
for (int i = 0; i < nonMatchWriterFactories.length; i++) {
nullWriters1[i] = nonMatchWriterFactories[i].createMissingWriter();
}
}
- final IPredicateEvaluator predEvaluator = (predEvaluatorFactory == null ? null
- : predEvaluatorFactory.createPredicateEvaluator());
+ final IPredicateEvaluator predEvaluator =
+ (predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator());
final int memSizeInBytes = memSizeInFrames * ctx.getInitialFrameSize();
final IDeallocatableFramePool framePool = new DeallocatableFramePool(ctx, memSizeInBytes);
@@ -181,10 +181,10 @@
@Override
public void open() throws HyracksDataException {
- ITuplePartitionComputer hpc0 = new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories)
- .createPartitioner();
- ITuplePartitionComputer hpc1 = new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories)
- .createPartitioner();
+ ITuplePartitionComputer hpc0 =
+ new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories).createPartitioner();
+ ITuplePartitionComputer hpc1 =
+ new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories).createPartitioner();
state = new HashBuildTaskState(ctx.getJobletContext().getJobId(),
new TaskId(getActivityId(), partition));
ISerializableTable table = new SerializableHashTable(tableSize, ctx, bufferManager);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java
index 5306ae5..9c5dc60 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java
@@ -51,8 +51,8 @@
int fEnd1 = accessor1.getFieldEndOffset(tIndex1, field1);
int fLen1 = fEnd1 - fStart1;
- int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0, accessor1
- .getBuffer().array(), fStart1 + fStartOffset1, fLen1);
+ int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0,
+ accessor1.getBuffer().array(), fStart1 + fStartOffset1, fLen1);
if (c != 0) {
return c;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java
index d9c0bcd..d0f5a73 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java
@@ -70,9 +70,9 @@
if (memSize < 3) {
throw new HyracksDataException("Not enough memory is available for Nested Loop Join");
}
- this.outerBufferMngr = new VariableFrameMemoryManager(
- new VariableFramePool(ctx, ctx.getInitialFrameSize() * (memSize - 2)),
- FrameFreeSlotPolicyFactory.createFreeSlotPolicy(EnumFreeSlotPolicy.LAST_FIT, memSize - 2));
+ this.outerBufferMngr =
+ new VariableFrameMemoryManager(new VariableFramePool(ctx, ctx.getInitialFrameSize() * (memSize - 2)),
+ FrameFreeSlotPolicyFactory.createFreeSlotPolicy(EnumFreeSlotPolicy.LAST_FIT, memSize - 2));
this.predEvaluator = predEval;
this.isReversed = false;
@@ -90,8 +90,8 @@
missingTupleBuilder = null;
}
- FileReference file = ctx.getJobletContext()
- .createManagedWorkspaceFile(this.getClass().getSimpleName() + this.toString());
+ FileReference file =
+ ctx.getJobletContext().createManagedWorkspaceFile(this.getClass().getSimpleName() + this.toString());
runFileWriter = new RunFileWriter(file, ctx.getIoManager());
runFileWriter.open();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
index 99dbfad..2236056 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
@@ -117,8 +117,8 @@
final RecordDescriptor rd0 = recordDescProvider.getInputRecordDescriptor(nljAid, 0);
final RecordDescriptor rd1 = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
final ITuplePairComparator comparator = comparatorFactory.createTuplePairComparator(ctx);
- final IPredicateEvaluator predEvaluator = (predEvaluatorFactory != null)
- ? predEvaluatorFactory.createPredicateEvaluator() : null;
+ final IPredicateEvaluator predEvaluator =
+ (predEvaluatorFactory != null) ? predEvaluatorFactory.createPredicateEvaluator() : null;
final IMissingWriter[] nullWriters1 = isLeftOuter ? new IMissingWriter[nullWriterFactories1.length] : null;
if (isLeftOuter) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
index d49a6dd..ddf1741 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
@@ -110,8 +110,7 @@
private int[] probePSizeInTups;
public OptimizedHybridHashJoin(IHyracksTaskContext ctx, int memSizeInFrames, int numOfPartitions,
- String probeRelName,
- String buildRelName, int[] probeKeys, int[] buildKeys, IBinaryComparator[] comparators,
+ String probeRelName, String buildRelName, int[] probeKeys, int[] buildKeys, IBinaryComparator[] comparators,
RecordDescriptor probeRd, RecordDescriptor buildRd, ITuplePartitionComputer probeHpc,
ITuplePartitionComputer buildHpc, IPredicateEvaluator predEval, boolean isLeftOuter,
IMissingWriterFactory[] nullWriterFactories1) {
@@ -259,8 +258,8 @@
break;
}
try {
- for (int pid = spilledStatus.nextSetBit(0); pid >= 0
- && pid < numOfPartitions; pid = spilledStatus.nextSetBit(pid + 1)) {
+ for (int pid = spilledStatus.nextSetBit(0); pid >= 0 && pid < numOfPartitions; pid =
+ spilledStatus.nextSetBit(pid + 1)) {
if (bufferManager.getNumTuples(pid) > 0) {
bufferManager.flushPartition(pid, getSpillWriterOrCreateNewOneIfNotExist(pid, whichSide));
bufferManager.clearPartition(pid);
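
The loops reformatted throughout this file iterate a java.util.BitSet with the standard nextSetBit/nextClearBit idiom; the extra pid < numOfPartitions bound matters because nextClearBit never returns -1 (a BitSet is conceptually an infinite run of zeros). A runnable sketch of the set-bit variant:

    import java.util.BitSet;

    final class BitSetLoopSketch {
        public static void main(String[] args) {
            BitSet spilled = new BitSet();
            spilled.set(1);
            spilled.set(5);
            // nextSetBit returns -1 once no set bit remains.
            for (int pid = spilled.nextSetBit(0); pid >= 0; pid = spilled.nextSetBit(pid + 1)) {
                System.out.println("spilled partition " + pid); // prints 1, then 5
            }
            // nextClearBit never returns -1, so the clear-bit loops above also
            // bound the index with pid < numOfPartitions.
        }
    }
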
@@ -293,16 +292,15 @@
// For partitions in main memory, we deduct their size from the free space.
int inMemTupCount = 0;
- for (int p = spilledStatus.nextClearBit(0); p >= 0
- && p < numOfPartitions; p = spilledStatus.nextClearBit(p + 1)) {
+ for (int p = spilledStatus.nextClearBit(0); p >= 0 && p < numOfPartitions; p =
+ spilledStatus.nextClearBit(p + 1)) {
freeSpace -= bufferManager.getPhysicalSize(p);
inMemTupCount += buildPSizeInTups[p];
}
// Calculates the expected hash table size for the given number of tuples in main memory
// and deducts it from the free space.
- long hashTableByteSizeForInMemTuples = SerializableHashTable.getExpectedTableByteSize(inMemTupCount,
- frameSize);
+ long hashTableByteSizeForInMemTuples = SerializableHashTable.getExpectedTableByteSize(inMemTupCount, frameSize);
freeSpace -= hashTableByteSizeForInMemTuples;
// In the case where free space is less than zero after considering the hash table size,
@@ -317,8 +315,9 @@
int pidToSpill = selectSinglePartitionToSpill(freeSpace, inMemTupCount, frameSize);
if (pidToSpill >= 0) {
// There is a suitable one. We spill that partition to the disk.
- long hashTableSizeDecrease = -SerializableHashTable.calculateByteSizeDeltaForTableSizeChange(
- inMemTupCount, -buildPSizeInTups[pidToSpill], frameSize);
+ long hashTableSizeDecrease =
+ -SerializableHashTable.calculateByteSizeDeltaForTableSizeChange(inMemTupCount,
+ -buildPSizeInTups[pidToSpill], frameSize);
freeSpace = freeSpace + bufferManager.getPhysicalSize(pidToSpill) + hashTableSizeDecrease;
inMemTupCount -= buildPSizeInTups[pidToSpill];
spillPartition(pidToSpill);
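
The two hunks above charge three things against the memory budget: the frames pinned by each in-memory partition, the hash table expected to index their tuples, and, when spilling a partition, the table-size delta that spilling wins back. A sketch of the accounting, with a hypothetical per-tuple cost standing in for SerializableHashTable.getExpectedTableByteSize:

    import java.util.BitSet;

    final class SpillAccountingSketch {
        // Hypothetical stand-in for SerializableHashTable.getExpectedTableByteSize;
        // the real formula lives in hyracks-dataflow-std.
        static long expectedTableBytes(int tupleCount, int frameSize) {
            return (long) tupleCount * 16; // assumed cost per tuple, illustration only
        }

        /** Bytes left after charging in-memory partitions and their hash table to the budget. */
        static long freeSpaceAfterTable(long budgetBytes, BitSet spilled, long[] partitionBytes,
                int[] partitionTuples, int numPartitions, int frameSize) {
            long free = budgetBytes;
            int inMemTupCount = 0;
            for (int p = spilled.nextClearBit(0); p >= 0 && p < numPartitions; p = spilled.nextClearBit(p + 1)) {
                free -= partitionBytes[p]; // data frames pinned by partition p
                inMemTupCount += partitionTuples[p];
            }
            // The hash table indexing the resident tuples consumes budget too.
            return free - expectedTableBytes(inMemTupCount, frameSize);
        }
    }
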
@@ -327,8 +326,8 @@
} else {
// There is no single suitable partition. So, we need to spill multiple partitions to the disk
// in order to accommodate the hash table.
- for (int p = spilledStatus.nextClearBit(0); p >= 0
- && p < numOfPartitions; p = spilledStatus.nextClearBit(p + 1)) {
+ for (int p = spilledStatus.nextClearBit(0); p >= 0 && p < numOfPartitions; p =
+ spilledStatus.nextClearBit(p + 1)) {
int spaceToBeReturned = bufferManager.getPhysicalSize(p);
int numberOfTuplesToBeSpilled = buildPSizeInTups[p];
if (spaceToBeReturned == 0 || numberOfTuplesToBeSpilled == 0) {
@@ -340,9 +339,9 @@
// Since the number of tuples in memory has been decreased,
// the hash table size will be decreased, too.
// We negate the result since the method returns a negative value to represent newly reclaimed space.
- long expectedHashTableSizeDecrease = -SerializableHashTable
- .calculateByteSizeDeltaForTableSizeChange(inMemTupCount, -numberOfTuplesToBeSpilled,
- frameSize);
+ long expectedHashTableSizeDecrease =
+ -SerializableHashTable.calculateByteSizeDeltaForTableSizeChange(inMemTupCount,
+ -numberOfTuplesToBeSpilled, frameSize);
freeSpace = freeSpace + spaceToBeReturned + expectedHashTableSizeDecrease;
// Adjusts the hash table size
inMemTupCount -= numberOfTuplesToBeSpilled;
@@ -356,8 +355,7 @@
// If more partitions have been spilled to the disk, calculate the expected hash table size again
// before bringing some partitions to main memory.
if (moreSpilled) {
- hashTableByteSizeForInMemTuples = SerializableHashTable.getExpectedTableByteSize(inMemTupCount,
- frameSize);
+ hashTableByteSizeForInMemTuples = SerializableHashTable.getExpectedTableByteSize(inMemTupCount, frameSize);
}
// Brings back some partitions if there is enough free space.
@@ -387,8 +385,8 @@
long minSpaceAfterSpill = (long) memSizeInFrames * frameSize;
int minSpaceAfterSpillPartID = -1;
- for (int p = spilledStatus.nextClearBit(0); p >= 0
- && p < numOfPartitions; p = spilledStatus.nextClearBit(p + 1)) {
+ for (int p = spilledStatus.nextClearBit(0); p >= 0 && p < numOfPartitions; p =
+ spilledStatus.nextClearBit(p + 1)) {
if (buildPSizeInTups[p] == 0 || bufferManager.getPhysicalSize(p) == 0) {
continue;
}
@@ -408,8 +406,8 @@
}
private int selectPartitionsToReload(long freeSpace, int pid, int inMemTupCount) {
- for (int i = spilledStatus.nextSetBit(pid); i >= 0
- && i < numOfPartitions; i = spilledStatus.nextSetBit(i + 1)) {
+ for (int i = spilledStatus.nextSetBit(pid); i >= 0 && i < numOfPartitions; i =
+ spilledStatus.nextSetBit(i + 1)) {
int spilledTupleCount = buildPSizeInTups[i];
// Expected hash table size increase after reloading this partition
long expectedHashTableByteSizeIncrease = SerializableHashTable.calculateByteSizeDeltaForTableSizeChange(
@@ -452,10 +450,10 @@
private void createInMemoryJoiner(int inMemTupCount) throws HyracksDataException {
ISerializableTable table = new SerializableHashTable(inMemTupCount, ctx, bufferManagerForHashTable);
- this.inMemJoiner = new InMemoryHashJoin(ctx, new FrameTupleAccessor(probeRd), probeHpc,
- new FrameTupleAccessor(buildRd), buildRd, buildHpc,
- new FrameTuplePairComparator(probeKeys, buildKeys, comparators), isLeftOuter, nonMatchWriters, table,
- predEvaluator, isReversed, bufferManagerForHashTable);
+ this.inMemJoiner =
+ new InMemoryHashJoin(ctx, new FrameTupleAccessor(probeRd), probeHpc, new FrameTupleAccessor(buildRd),
+ buildRd, buildHpc, new FrameTuplePairComparator(probeKeys, buildKeys, comparators), isLeftOuter,
+ nonMatchWriters, table, predEvaluator, isReversed, bufferManagerForHashTable);
}
private void loadDataInMemJoin() throws HyracksDataException {
@@ -632,8 +630,8 @@
buf.append("(A) Spilled partitions" + "\n");
int spilledTupleCount = 0;
int spilledPartByteSize = 0;
- for (int pid = spilledStatus.nextSetBit(0); pid >= 0
- && pid < numOfPartitions; pid = spilledStatus.nextSetBit(pid + 1)) {
+ for (int pid = spilledStatus.nextSetBit(0); pid >= 0 && pid < numOfPartitions; pid =
+ spilledStatus.nextSetBit(pid + 1)) {
if (whichSide == SIDE.BUILD) {
spilledTupleCount += buildPSizeInTups[pid];
spilledPartByteSize += buildRFWriters[pid].getFileSize();
@@ -653,8 +651,8 @@
buf.append("(B) In-memory partitions" + "\n");
int inMemoryTupleCount = 0;
int inMemoryPartByteSize = 0;
- for (int pid = spilledStatus.nextClearBit(0); pid >= 0
- && pid < numOfPartitions; pid = spilledStatus.nextClearBit(pid + 1)) {
+ for (int pid = spilledStatus.nextClearBit(0); pid >= 0 && pid < numOfPartitions; pid =
+ spilledStatus.nextClearBit(pid + 1)) {
if (whichSide == SIDE.BUILD) {
inMemoryTupleCount += buildPSizeInTups[pid];
inMemoryPartByteSize += bufferManager.getPhysicalSize(pid);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
index 8dbe9b0..9eeb363 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
@@ -268,17 +268,19 @@
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
- final IPredicateEvaluator predEvaluator = (predEvaluatorFactory == null ? null
- : predEvaluatorFactory.createPredicateEvaluator());
+ final IPredicateEvaluator predEvaluator =
+ (predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator());
IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
private BuildAndPartitionTaskState state = new BuildAndPartitionTaskState(
ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition));
- ITuplePartitionComputer probeHpc = new FieldHashPartitionComputerFamily(probeKeys,
- hashFunctionGeneratorFactories).createPartitioner(0);
- ITuplePartitionComputer buildHpc = new FieldHashPartitionComputerFamily(buildKeys,
- hashFunctionGeneratorFactories).createPartitioner(0);
+ ITuplePartitionComputer probeHpc =
+ new FieldHashPartitionComputerFamily(probeKeys, hashFunctionGeneratorFactories)
+ .createPartitioner(0);
+ ITuplePartitionComputer buildHpc =
+ new FieldHashPartitionComputerFamily(buildKeys, hashFunctionGeneratorFactories)
+ .createPartitioner(0);
boolean isFailed = false;
@Override
@@ -287,8 +289,8 @@
throw new HyracksDataException("Not enough memory is assigend for Hybrid Hash Join.");
}
state.memForJoin = memSizeInFrames - 2;
- state.numOfPartitions = getNumberOfPartitions(state.memForJoin, inputsize0, fudgeFactor,
- nPartitions);
+ state.numOfPartitions =
+ getNumberOfPartitions(state.memForJoin, inputsize0, fudgeFactor, nPartitions);
state.hybridHJ = new OptimizedHybridHashJoin(ctx, state.memForJoin, state.numOfPartitions,
PROBE_REL, BUILD_REL, probeKeys, buildKeys, comparators, probeRd, buildRd, probeHpc,
buildHpc, predEvaluator, isLeftOuter, nonMatchWriterFactories);
@@ -355,21 +357,21 @@
final RecordDescriptor buildRd = recordDescProvider.getInputRecordDescriptor(buildAid, 0);
final RecordDescriptor probeRd = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
final IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
- final ITuplePairComparator nljComparatorProbe2Build = tuplePairComparatorFactoryProbe2Build
- .createTuplePairComparator(ctx);
- final ITuplePairComparator nljComparatorBuild2Probe = tuplePairComparatorFactoryBuild2Probe
- .createTuplePairComparator(ctx);
- final IPredicateEvaluator predEvaluator = predEvaluatorFactory == null ? null
- : predEvaluatorFactory.createPredicateEvaluator();
+ final ITuplePairComparator nljComparatorProbe2Build =
+ tuplePairComparatorFactoryProbe2Build.createTuplePairComparator(ctx);
+ final ITuplePairComparator nljComparatorBuild2Probe =
+ tuplePairComparatorFactoryBuild2Probe.createTuplePairComparator(ctx);
+ final IPredicateEvaluator predEvaluator =
+ predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator();
for (int i = 0; i < comparatorFactories.length; i++) {
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
- final IMissingWriter[] nonMatchWriter = isLeftOuter ? new IMissingWriter[nonMatchWriterFactories.length]
- : null;
- final ArrayTupleBuilder nullTupleBuild = isLeftOuter ? new ArrayTupleBuilder(buildRd.getFieldCount())
- : null;
+ final IMissingWriter[] nonMatchWriter =
+ isLeftOuter ? new IMissingWriter[nonMatchWriterFactories.length] : null;
+ final ArrayTupleBuilder nullTupleBuild =
+ isLeftOuter ? new ArrayTupleBuilder(buildRd.getFieldCount()) : null;
if (isLeftOuter) {
DataOutput out = nullTupleBuild.getDataOutput();
for (int i = 0; i < nonMatchWriterFactories.length; i++) {
@@ -432,8 +434,8 @@
}
BitSet partitionStatus = state.hybridHJ.getPartitionStatus();
rPartbuff.reset();
- for (int pid = partitionStatus.nextSetBit(0); pid >= 0; pid = partitionStatus
- .nextSetBit(pid + 1)) {
+ for (int pid = partitionStatus.nextSetBit(0); pid >= 0; pid =
+ partitionStatus.nextSetBit(pid + 1)) {
RunFileReader bReader = state.hybridHJ.getBuildRFReader(pid);
RunFileReader pReader = state.hybridHJ.getProbeRFReader(pid);
@@ -474,10 +476,12 @@
//The buildSideReader should always be the original buildSideReader, and so should the probeSideReader
private void joinPartitionPair(RunFileReader buildSideReader, RunFileReader probeSideReader,
int buildSizeInTuple, int probeSizeInTuple, int level) throws HyracksDataException {
- ITuplePartitionComputer probeHpc = new FieldHashPartitionComputerFamily(probeKeys,
- hashFunctionGeneratorFactories).createPartitioner(level);
- ITuplePartitionComputer buildHpc = new FieldHashPartitionComputerFamily(buildKeys,
- hashFunctionGeneratorFactories).createPartitioner(level);
+ ITuplePartitionComputer probeHpc =
+ new FieldHashPartitionComputerFamily(probeKeys, hashFunctionGeneratorFactories)
+ .createPartitioner(level);
+ ITuplePartitionComputer buildHpc =
+ new FieldHashPartitionComputerFamily(buildKeys, hashFunctionGeneratorFactories)
+ .createPartitioner(level);
int frameSize = ctx.getInitialFrameSize();
long buildPartSize = (long) Math.ceil((double) buildSideReader.getFileSize() / (double) frameSize);
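
Both partitioners above are seeded with the recursion level: each level draws a different hash function from the family, so tuples that collided into one partition at level N spread out at level N + 1 instead of repeating the same skew. A sketch of the idea, with a hypothetical family shape (the mixing constants are illustrative):

    import java.util.function.IntUnaryOperator;

    final class LevelSeededPartitionerSketch {
        // Hypothetical stand-in for ITuplePartitionComputerFamily.createPartitioner(level):
        // mixes the level into the hash so each recursion level partitions differently.
        static IntUnaryOperator createPartitioner(int level, int numPartitions) {
            final int seed = 0x9E3779B9 * (level + 1);
            return key -> Math.floorMod(Integer.rotateLeft(key * seed, 13), numPartitions);
        }
    }
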
@@ -492,10 +496,10 @@
}
// Calculate the expected hash table size for the both side.
- long expectedHashTableSizeForBuildInFrame = SerializableHashTable
- .getExpectedTableFrameCount(buildSizeInTuple, frameSize);
- long expectedHashTableSizeForProbeInFrame = SerializableHashTable
- .getExpectedTableFrameCount(probeSizeInTuple, frameSize);
+ long expectedHashTableSizeForBuildInFrame =
+ SerializableHashTable.getExpectedTableFrameCount(buildSizeInTuple, frameSize);
+ long expectedHashTableSizeForProbeInFrame =
+ SerializableHashTable.getExpectedTableFrameCount(probeSizeInTuple, frameSize);
//Apply in-Mem HJ if possible
if (!skipInMemoryHJ && ((buildPartSize + expectedHashTableSizeForBuildInFrame < state.memForJoin)
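
The condition above, whose second half is cut off at the hunk boundary, gates the in-memory rejoin of a spilled partition pair: one side's data frames plus its expected hash-table frames must fit the join budget. A sketch of the feasibility test; the probe-side alternative (via role reversal) is included as an assumption:

    final class InMemoryJoinFeasibilitySketch {
        static boolean canJoinInMemory(long buildPartFrames, long buildTableFrames,
                long probePartFrames, long probeTableFrames, long memForJoinFrames) {
            // Either side (build preferred, probe via role reversal) must fit with its hash table.
            return buildPartFrames + buildTableFrames < memForJoinFrames
                    || probePartFrames + probeTableFrames < memForJoinFrames;
        }
    }
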
@@ -646,9 +650,8 @@
} else { //Case 2.1.2 - Switch to NLJ
if (LOGGER.isDebugEnabled()) {
- LOGGER.debug(
- "\t\t>>>Case 2.1.2 - SWITCHED to NLJ RecursiveHHJ WITH "
- + "(isLeftOuter || build<probe) - [Level " + level + "]");
+ LOGGER.debug("\t\t>>>Case 2.1.2 - SWITCHED to NLJ RecursiveHHJ WITH "
+ + "(isLeftOuter || build<probe) - [Level " + level + "]");
}
for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
@@ -696,8 +699,8 @@
probeTupleAccessor.reset(rPartbuff.getBuffer());
for (int tid = 0; tid < probeTupleAccessor.getTupleCount(); tid++) {
FrameUtils.appendConcatToWriter(writer, nullResultAppender, probeTupleAccessor, tid,
- nullTupleBuild.getFieldEndOffsets(), nullTupleBuild.getByteArray(), 0,
- nullTupleBuild.getSize());
+ nullTupleBuild.getFieldEndOffsets(), nullTupleBuild.getByteArray(), 0,
+ nullTupleBuild.getSize());
}
}
nullResultAppender.write(writer, true);
@@ -713,13 +716,13 @@
boolean isReversed = pKeys == OptimizedHybridHashJoinOperatorDescriptor.this.buildKeys
&& bKeys == OptimizedHybridHashJoinOperatorDescriptor.this.probeKeys;
assert isLeftOuter ? !isReversed : true : "Left outer join cannot reverse roles";
- IDeallocatableFramePool framePool = new DeallocatableFramePool(ctx,
- state.memForJoin * ctx.getInitialFrameSize());
+ IDeallocatableFramePool framePool =
+ new DeallocatableFramePool(ctx, state.memForJoin * ctx.getInitialFrameSize());
ISimpleFrameBufferManager bufferManager = new FramePoolBackedFrameBufferManager(framePool);
ISerializableTable table = new SerializableHashTable(tabSize, ctx, bufferManager);
- InMemoryHashJoin joiner = new InMemoryHashJoin(ctx, new FrameTupleAccessor(probeRDesc),
- hpcRepProbe, new FrameTupleAccessor(buildRDesc), buildRDesc, hpcRepBuild,
+ InMemoryHashJoin joiner = new InMemoryHashJoin(ctx, new FrameTupleAccessor(probeRDesc), hpcRepProbe,
+ new FrameTupleAccessor(buildRDesc), buildRDesc, hpcRepBuild,
new FrameTuplePairComparator(pKeys, bKeys, comparators), isLeftOuter, nonMatchWriter, table,
predEvaluator, isReversed, bufferManager);
@@ -777,11 +780,11 @@
// Hence the reverse relation is different.
boolean isReversed = outerRd == buildRd && innerRd == probeRd;
assert isLeftOuter ? !isReversed : true : "Left outer join cannot reverse roles";
- ITuplePairComparator nljComptorOuterInner = isReversed ? nljComparatorBuild2Probe
- : nljComparatorProbe2Build;
- NestedLoopJoin nlj = new NestedLoopJoin(ctx, new FrameTupleAccessor(outerRd),
- new FrameTupleAccessor(innerRd), nljComptorOuterInner, memorySize, predEvaluator,
- isLeftOuter, nonMatchWriter);
+ ITuplePairComparator nljComptorOuterInner =
+ isReversed ? nljComparatorBuild2Probe : nljComparatorProbe2Build;
+ NestedLoopJoin nlj =
+ new NestedLoopJoin(ctx, new FrameTupleAccessor(outerRd), new FrameTupleAccessor(innerRd),
+ nljComptorOuterInner, memorySize, predEvaluator, isLeftOuter, nonMatchWriter);
nlj.setIsReversed(isReversed);
IFrame cacheBuff = new VSizeFrame(ctx);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
index 0629168..7b687c4 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
@@ -34,8 +34,8 @@
private byte[] tupleData;
private int tupleSize;
- public ConstantTupleSourceOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc, int[] fieldSlots,
- byte[] tupleData, int tupleSize) {
+ public ConstantTupleSourceOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
+ int[] fieldSlots, byte[] tupleData, int tupleSize) {
super(spec, 0, 1);
this.tupleData = tupleData;
this.fieldSlots = fieldSlots;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/IdentityOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/IdentityOperatorDescriptor.java
index 156198a..29c8fed 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/IdentityOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/IdentityOperatorDescriptor.java
@@ -40,7 +40,7 @@
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
@Override
public void open() throws HyracksDataException {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/LimitOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/LimitOperatorDescriptor.java
index 4fc1ad2..a190686 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/LimitOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/LimitOperatorDescriptor.java
@@ -46,7 +46,7 @@
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
private FrameTupleAccessor fta;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java
index 31cbaad..6ba11ca 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java
@@ -59,8 +59,8 @@
}
public void open(IHyracksTaskContext ctx) throws HyracksDataException {
- FileReference file = ctx.getJobletContext()
- .createManagedWorkspaceFile(MaterializerTaskState.class.getSimpleName());
+ FileReference file =
+ ctx.getJobletContext().createManagedWorkspaceFile(MaterializerTaskState.class.getSimpleName());
out = new RunFileWriter(file, ctx.getIoManager());
out.open();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
index d3e87d4..3f97752 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
@@ -59,8 +59,8 @@
@Override
public void contributeActivities(IActivityGraphBuilder builder) {
if (isSingleActivity) {
- MaterializerReaderActivityNode mra = new MaterializerReaderActivityNode(
- new ActivityId(odId, MATERIALIZER_READER_ACTIVITY_ID));
+ MaterializerReaderActivityNode mra =
+ new MaterializerReaderActivityNode(new ActivityId(odId, MATERIALIZER_READER_ACTIVITY_ID));
builder.addActivity(this, mra);
builder.addSourceEdge(0, mra, 0);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/result/ResultWriterOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/result/ResultWriterOperatorDescriptor.java
index d081bdb..b0cc40c 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/result/ResultWriterOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/result/ResultWriterOperatorDescriptor.java
@@ -76,8 +76,8 @@
PrintStream printStream = new PrintStream(frameOutputStream);
final RecordDescriptor outRecordDesc = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
- final IResultSerializer resultSerializer = resultSerializerFactory.createResultSerializer(outRecordDesc,
- printStream);
+ final IResultSerializer resultSerializer =
+ resultSerializerFactory.createResultSerializer(outRecordDesc, printStream);
final FrameTupleAccessor frameTupleAccessor = new FrameTupleAccessor(outRecordDesc);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/RunMergingFrameReader.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/RunMergingFrameReader.java
index 3cbe86b..4d9e813 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/RunMergingFrameReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/RunMergingFrameReader.java
@@ -72,9 +72,8 @@
// right now we didn't take multiple key normalizers for frame merger, since during this step it won't be
// too many cache misses (merging multiple runs sequentially).
// but still, we can apply a special optimization if there is only 1 sort field
- this.normalizedKeyDecisive =
- nmkComputer != null ? nmkComputer.getNormalizedKeyProperties().isDecisive() && comparators.length == 1
- : false;
+ this.normalizedKeyDecisive = nmkComputer != null
+ ? nmkComputer.getNormalizedKeyProperties().isDecisive() && comparators.length == 1 : false;
this.recordDesc = recordDesc;
this.topK = topK;
}
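The RunMergingFrameReader hunk above (and the TopKSorterOperatorDescriptor one just below) shows how the template now wraps conditional expressions: the '?' branch stays with the condition, and the ': else' clause joins the continuation line whenever it fits, rather than dropping to a third line. A runnable sketch, reusing variable names that appear later in the SimpleSerializableHashTable hunk purely for illustration:

public class TernaryWrapExample {
    public static void main(String[] args) {
        int frameCapacity = 128;
        int requiredIntCapacity = -16;
        // Old wrap: ': frameCapacity - 1' fell to its own line.
        // New wrap: the else-branch shares the continuation line when it fits.
        int newLastOffsetInContentFrame =
                requiredIntCapacity < 0 ? requiredIntCapacity + frameCapacity : frameCapacity - 1;
        System.out.println(newLastOffsetInContentFrame); // prints 112
    }
}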
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java
index a90d48f..dea770a 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java
@@ -41,9 +41,9 @@
public TopKSorterOperatorDescriptor(IOperatorDescriptorRegistry spec, int framesLimit, int topK, int[] sortFields,
INormalizedKeyComputerFactory firstKeyNormalizerFactory, IBinaryComparatorFactory[] comparatorFactories,
RecordDescriptor recordDescriptor) {
- this(spec, framesLimit, topK, sortFields,
- firstKeyNormalizerFactory != null ? new INormalizedKeyComputerFactory[] { firstKeyNormalizerFactory }
- : null,
+ this(spec, framesLimit, topK,
+ sortFields, firstKeyNormalizerFactory != null
+ ? new INormalizedKeyComputerFactory[] { firstKeyNormalizerFactory } : null,
comparatorFactories, recordDescriptor);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppender.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppender.java
index 7d4db64..8ff77ca 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppender.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppender.java
@@ -42,7 +42,7 @@
private int tupleCount;
private int freeDataEndOffset;
private int deletedSpace;
- private byte[] array; // to speed up the array visit a little
+ private byte[] array; // to speed up the array visit a little
public DeletableFrameTupleAppender(RecordDescriptor recordDescriptor) {
this.recordDescriptor = recordDescriptor;
@@ -146,7 +146,7 @@
endOffset = getTupleEndOffset(i);
if (endOffset >= 0) {
int length = endOffset - startOffset;
- assert ( length >= 0);
+ assert (length >= 0);
if (freeDataEndOffset != startOffset) {
System.arraycopy(array, startOffset, array, freeDataEndOffset, length);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/GroupVSizeFrame.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/GroupVSizeFrame.java
index d9460aa..1f45032 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/GroupVSizeFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/GroupVSizeFrame.java
@@ -26,8 +26,7 @@
public class GroupVSizeFrame extends VSizeFrame {
- public GroupVSizeFrame(IHyracksCommonContext ctx, int frameSize)
- throws HyracksDataException {
+ public GroupVSizeFrame(IHyracksCommonContext ctx, int frameSize) throws HyracksDataException {
super(ctx, frameSize);
}
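GroupVSizeFrame demonstrates the other high-frequency rewrite: a wrapped 'throws' clause is pulled back onto the signature line whenever the joined result fits in 120 columns. In the createPushRuntime hunks above, where the clause must stay wrapped, only its indentation changed; the extracted diff strips leading whitespace, so that change is invisible here. The sketch below shows both cases with hypothetical names; the doubled continuation indent on the wrapped clause is an assumption about the template, not something readable off this diff:

public class ThrowsWrapExample {
    // Fits within 120 columns once joined, so 'throws' returns to the signature line.
    static void open(String ctx, int frameSize) throws Exception {
        System.out.println(ctx + ":" + frameSize);
    }

    // Still too long to join: the clause stays wrapped, on its own indented line.
    static void createPushRuntime(String taskContext, String recordDescriptorProvider, int partition, int nPartitions)
            throws Exception {
        System.out.println(taskContext + recordDescriptorProvider + partition + nPartitions);
    }

    public static void main(String[] args) throws Exception {
        open("ctx", 32768);
        createPushRuntime("ctx", "provider", 0, 1);
    }
}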
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/IResetableComparable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/IResetableComparable.java
index 1b202e5..7cb6356 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/IResetableComparable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/IResetableComparable.java
@@ -19,5 +19,5 @@
package org.apache.hyracks.dataflow.std.structures;
-public interface IResetableComparable<T> extends IResetable<T>, Comparable<T>{
+public interface IResetableComparable<T> extends IResetable<T>, Comparable<T> {
}
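Alongside wrapping, the template normalizes token spacing and trims stray blank lines, as in the one-character fixes above: the space added before '{' in IResetableComparable, the stray inner space dropped from 'assert ( length' in DeletableFrameTupleAppender, the doubled space before its trailing comment, and the blank line deleted at the end of MaxHeap. A tiny compilable sketch of those spacing rules:

public class SpacingExample {
    // 'extends ... Comparable<T>{' gains a space before '{'.
    interface ResetableComparable<T> extends Comparable<T> {
    }

    public static void main(String[] args) {
        int length = 4;
        assert (length >= 0); // was 'assert ( length >= 0);' -- inner space dropped
        byte[] array = new byte[length]; // one space before a trailing comment, not two
        System.out.println(array.length);
    }
}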
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/ISerializableTable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/ISerializableTable.java
index 015ddb3..51f9984 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/ISerializableTable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/ISerializableTable.java
@@ -57,8 +57,7 @@
* @return the number of frames that are reclaimed.
* @throws HyracksDataException
*/
- int collectGarbage(ITuplePointerAccessor bufferAccessor, ITuplePartitionComputer tpc)
- throws HyracksDataException;
+ int collectGarbage(ITuplePointerAccessor bufferAccessor, ITuplePartitionComputer tpc) throws HyracksDataException;
/**
* Prints out the internal information of this table.
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/MaxHeap.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/MaxHeap.java
index 4c9d05d..87d17da 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/MaxHeap.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/MaxHeap.java
@@ -64,4 +64,3 @@
trickleDown(0);
}
}
-
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SerializableHashTable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SerializableHashTable.java
index ca97be3..e6da7c9 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SerializableHashTable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SerializableHashTable.java
@@ -45,8 +45,7 @@
}
public SerializableHashTable(int tableSize, final IHyracksFrameMgrContext ctx,
- ISimpleFrameBufferManager bufferManager, double garbageCollectionThreshold)
- throws HyracksDataException {
+ ISimpleFrameBufferManager bufferManager, double garbageCollectionThreshold) throws HyracksDataException {
super(tableSize, ctx, false);
this.bufferManager = bufferManager;
@@ -142,8 +141,8 @@
// Step #2. Advances the reader until it hits the end of the given frame.
while (gcInfo.currentReadIntOffsetInPageForGC < frameCapacity) {
- nextSlotIntPosInPageForGC = findNextSlotInPage(currentReadContentFrameForGC,
- gcInfo.currentReadIntOffsetInPageForGC);
+ nextSlotIntPosInPageForGC =
+ findNextSlotInPage(currentReadContentFrameForGC, gcInfo.currentReadIntOffsetInPageForGC);
if (nextSlotIntPosInPageForGC == INVALID_VALUE) {
// There isn't a valid slot in the page. Exits the loop #2 and reads the next frame.
@@ -174,8 +173,8 @@
}
// Migrates this slot to the current offset in Writer's Frame if possible.
- currentPageChanged = MigrateSlot(gcInfo, bufferAccessor, tpc, capacityInIntCount,
- nextSlotIntPosInPageForGC);
+ currentPageChanged =
+ MigrateSlot(gcInfo, bufferAccessor, tpc, capacityInIntCount, nextSlotIntPosInPageForGC);
if (currentPageChanged) {
currentReadContentFrameForGC = contents.get(gcInfo.currentReadPageForGC);
@@ -385,8 +384,8 @@
* given tuple pointer.
*/
private void updateHeaderToContentPointerInHeaderFrame(ITuplePointerAccessor bufferAccessor,
- ITuplePartitionComputer tpc, TuplePointer hashedTuple, int newContentFrame,
- int newOffsetInContentFrame) throws HyracksDataException {
+ ITuplePartitionComputer tpc, TuplePointer hashedTuple, int newContentFrame, int newOffsetInContentFrame)
+ throws HyracksDataException {
// Finds the original hash value. We assume that bufferAccessor and tpc is already assigned.
bufferAccessor.reset(hashedTuple);
int entry = tpc.partition(bufferAccessor, hashedTuple.getTupleIndex(), tableSize);
@@ -401,7 +400,6 @@
headerFrame.writeInt(offsetInHeaderFrame + 1, newOffsetInContentFrame);
}
-
/**
* Tries to find the next valid slot position in the given content frame from the current position.
*/
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SimpleSerializableHashTable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SimpleSerializableHashTable.java
index 6497a53..cfc58ce 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SimpleSerializableHashTable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SimpleSerializableHashTable.java
@@ -309,16 +309,16 @@
lastContentFrame.writeInt(lastOffsetInCurrentFrame + 2, pointer.getFrameIndex());
lastContentFrame.writeInt(lastOffsetInCurrentFrame + 3, pointer.getTupleIndex());
int newLastOffsetInContentFrame = lastOffsetInCurrentFrame + entryCapacity * 2;
- newLastOffsetInContentFrame = newLastOffsetInContentFrame < frameCapacity ? newLastOffsetInContentFrame
- : frameCapacity - 1;
+ newLastOffsetInContentFrame =
+ newLastOffsetInContentFrame < frameCapacity ? newLastOffsetInContentFrame : frameCapacity - 1;
currentOffsetInEachFrameList.set(currentFrameNumber, newLastOffsetInContentFrame);
requiredIntCapacity = entryCapacity * 2 - (frameCapacity - lastOffsetInCurrentFrame);
while (requiredIntCapacity > 0) {
currentFrameNumber++;
requiredIntCapacity -= frameCapacity;
- newLastOffsetInContentFrame = requiredIntCapacity < 0 ? requiredIntCapacity + frameCapacity
- : frameCapacity - 1;
+ newLastOffsetInContentFrame =
+ requiredIntCapacity < 0 ? requiredIntCapacity + frameCapacity : frameCapacity - 1;
currentOffsetInEachFrameList.set(currentFrameNumber, newLastOffsetInContentFrame);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
index 967977e..a03d8d7 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
@@ -62,7 +62,7 @@
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new UnionOperator();
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/util/ReferencedPriorityQueue.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/util/ReferencedPriorityQueue.java
index cc6c2d9..2681fe6 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/util/ReferencedPriorityQueue.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/util/ReferencedPriorityQueue.java
@@ -113,7 +113,7 @@
ReferenceEntry tmp = entries[slot];
entries[slot] = curr;
curr = tmp;// winner to pass up
- }// else curr wins
+ } // else curr wins
slot >>= 1;
}
// set new entries[0]
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
index 11148a2..0e42397 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
@@ -40,8 +40,8 @@
import org.apache.hyracks.util.IntSerDeUtils;
public abstract class AbstractTupleMemoryManagerTest {
- ISerializerDeserializer[] fieldsSerDer = new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() };
+ ISerializerDeserializer[] fieldsSerDer = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ new UTF8StringSerializerDeserializer() };
RecordDescriptor recordDescriptor = new RecordDescriptor(fieldsSerDer);
ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(recordDescriptor.getFieldCount());
FrameTupleAccessor inFTA = new FrameTupleAccessor(recordDescriptor);
@@ -61,9 +61,7 @@
assertEquals(map.size(), mapInserted.size());
}
- protected Map<Integer, Integer> prepareFixedSizeTuples(
- int tuplePerFrame,
- int extraMetaBytePerFrame,
+ protected Map<Integer, Integer> prepareFixedSizeTuples(int tuplePerFrame, int extraMetaBytePerFrame,
int extraMetaBytePerRecord) throws HyracksDataException {
Map<Integer, Integer> dataSet = new HashMap<>();
ByteBuffer buffer = ByteBuffer.allocate(Common.BUDGET);
@@ -72,8 +70,7 @@
appender.reset(frame, true);
int sizePerTuple = (Common.MIN_FRAME_SIZE - 1 - tuplePerFrame * 4 - 4 - extraMetaBytePerFrame) / tuplePerFrame;
- int sizeChar =
- sizePerTuple - extraMetaBytePerRecord - fieldsSerDer.length * 4 - 4 - 2; //2byte to write str length
+ int sizeChar = sizePerTuple - extraMetaBytePerRecord - fieldsSerDer.length * 4 - 4 - 2; //2byte to write str length
assert (sizeChar > 0);
for (int i = 0; i < Common.NUM_MIN_FRAME * tuplePerFrame; i++) {
tupleBuilder.reset();
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/FrameFreeSlotBestFitUsingTreeMapTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/FrameFreeSlotBestFitUsingTreeMapTest.java
index 992c7f6..e5a4091 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/FrameFreeSlotBestFitUsingTreeMapTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/FrameFreeSlotBestFitUsingTreeMapTest.java
@@ -54,11 +54,10 @@
}
@Test
- public void testReset(){
+ public void testReset() {
testAll();
policy.reset();
testAll();
}
-
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableFramesMemoryManagerTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableFramesMemoryManagerTest.java
index 9d4a9a1..21680eb 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableFramesMemoryManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableFramesMemoryManagerTest.java
@@ -157,12 +157,12 @@
framesMemoryManager.getFrame(i, info);
fta.reset(info.getBuffer(), info.getStartOffset(), info.getLength());
for (int t = 0; t < fta.getTupleCount(); t++) {
- int id = parseTuple(fta.getBuffer(), fta.getTupleStartOffset(t) + fta.getFieldSlotsLength() + fta
- .getFieldStartOffset(t, 0));
+ int id = parseTuple(fta.getBuffer(),
+ fta.getTupleStartOffset(t) + fta.getFieldSlotsLength() + fta.getFieldStartOffset(t, 0));
// System.out.println("frameid:" + i + ",tuple:" + t + ",has id:" + id + ",length:" +
// (fta.getTupleEndOffset(t) - fta.getTupleStartOffset(t) - fta.getFieldSlotsLength()));
- assertTrue(tupleSet.remove(id) == fta.getTupleEndOffset(t) - fta.getTupleStartOffset(t) - fta
- .getFieldSlotsLength());
+ assertTrue(tupleSet.remove(id) == fta.getTupleEndOffset(t) - fta.getTupleStartOffset(t)
+ - fta.getFieldSlotsLength());
}
}
assertTrue(tupleSet.isEmpty());
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableTupleMemoryManagerTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableTupleMemoryManagerTest.java
index e2a231f..8cc6df3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableTupleMemoryManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableTupleMemoryManagerTest.java
@@ -155,8 +155,7 @@
}
private ByteBuffer deleteRandomSelectedTuples(Map<Integer, Integer> map, Map<TuplePointer, Integer> mapInserted,
- int minNumOfRecordTobeDeleted)
- throws HyracksDataException {
+ int minNumOfRecordTobeDeleted) throws HyracksDataException {
ByteBuffer buffer = ByteBuffer.allocate(Common.BUDGET);
FixedSizeFrame frame = new FixedSizeFrame(buffer);
FrameTupleAppender appender = new FrameTupleAppender();
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppenderTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppenderTest.java
index 7686540..468f879 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppenderTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppenderTest.java
@@ -39,10 +39,8 @@
public class DeletableFrameTupleAppenderTest {
DeletableFrameTupleAppender appender;
- ISerializerDeserializer[] fields = new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE,
- new UTF8StringSerializerDeserializer(),
- };
+ ISerializerDeserializer[] fields = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ new UTF8StringSerializerDeserializer(), };
RecordDescriptor recordDescriptor = new RecordDescriptor(fields);
ArrayTupleBuilder builder = new ArrayTupleBuilder(recordDescriptor.getFieldCount());
static final char TEST_CH = 'x';
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MaxHeapTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MaxHeapTest.java
index 16b24c4..7e31956 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MaxHeapTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MaxHeapTest.java
@@ -97,7 +97,7 @@
maxHeap.peekMax(peekI);
maxHeap.getMax(maxI);
assertTrue(peekI.compareTo(maxI) == 0);
- assertEquals( i++, capacity - 1 - maxI.i);
+ assertEquals(i++, capacity - 1 - maxI.i);
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MinHeapTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MinHeapTest.java
index 1a5fba3..be10095 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MinHeapTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MinHeapTest.java
@@ -25,7 +25,7 @@
import org.junit.Test;
-public class MinHeapTest extends AbstracHeapTest{
+public class MinHeapTest extends AbstracHeapTest {
@Test
public void testInitialMinHeap() {
@@ -102,5 +102,4 @@
}
}
-
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
index 48377e3..8ac34d8 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
@@ -144,7 +144,8 @@
// B-Tree tuple, etc.
IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
- IIndexDataflowHelperFactory primaryHelperFactory = new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
+ IIndexDataflowHelperFactory primaryHelperFactory =
+ new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
// create operator descriptor
TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsert =
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
index 203d22c..2fb1cee 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
@@ -145,7 +145,8 @@
// to field 0 of B-Tree tuple,
// etc.
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
- IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
+ IIndexDataflowHelperFactory dataflowHelperFactory =
+ new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, recDesc,
fieldPermutation, 0.7f, false, 1000L, true, dataflowHelperFactory);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
index 603dc6b..c32b72c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
@@ -139,7 +139,8 @@
// into search op
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
- IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
+ IIndexDataflowHelperFactory dataflowHelperFactory =
+ new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, lowKeyFields,
highKeyFields, true, true, dataflowHelperFactory, false, false, null,
NoOpOperationCallbackFactory.INSTANCE, null, null, false);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
index 7507f10..a8bea08 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
@@ -117,7 +117,8 @@
// use a disk-order scan to read primary index
IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
- IIndexDataflowHelperFactory primaryHelperFactory = new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
+ IIndexDataflowHelperFactory primaryHelperFactory =
+ new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
TreeIndexDiskOrderScanOperatorDescriptor btreeScanOp = new TreeIndexDiskOrderScanOperatorDescriptor(spec,
recDesc, primaryHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
JobHelper.createPartitionConstraint(spec, btreeScanOp, splitNCs);
@@ -139,7 +140,8 @@
// tuple
int[] fieldPermutation = { 1, 0 };
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
- IIndexDataflowHelperFactory secondaryHelperFactory = new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
+ IIndexDataflowHelperFactory secondaryHelperFactory =
+ new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, null,
fieldPermutation, 0.7f, false, 1000L, true, secondaryHelperFactory);
JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
index 1e909ef..ccf20fe 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
@@ -183,7 +183,8 @@
// op
IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
- IIndexDataflowHelperFactory primaryHelperFactory = new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
+ IIndexDataflowHelperFactory primaryHelperFactory =
+ new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
primaryLowKeyFields, primaryHighKeyFields, true, true, primaryHelperFactory, false, false, null,
NoOpOperationCallbackFactory.INSTANCE, null, null, false);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/DataSetConstants.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/DataSetConstants.java
index d6775cc..db51ed2 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/DataSetConstants.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/DataSetConstants.java
@@ -31,19 +31,17 @@
public class DataSetConstants {
- public static final RecordDescriptor inputRecordDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ public static final RecordDescriptor inputRecordDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- public static final IValueParserFactory[] inputParserFactories =
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE };
+ public static final IValueParserFactory[] inputParserFactories = new IValueParserFactory[] {
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE };
// field, type and key declarations for primary index
public static int[] primaryFieldPermutation = { 0, 1, 2, 4, 5, 7 };
@@ -54,10 +52,9 @@
public static final IBinaryComparatorFactory[] filterCmpFactories =
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
- public static final ITypeTraits[] primaryTypeTraits =
- new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS, UTF8StringPointable.TYPE_TRAITS,
- UTF8StringPointable.TYPE_TRAITS, UTF8StringPointable.TYPE_TRAITS, UTF8StringPointable.TYPE_TRAITS,
- UTF8StringPointable.TYPE_TRAITS };
+ public static final ITypeTraits[] primaryTypeTraits = new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS,
+ UTF8StringPointable.TYPE_TRAITS, UTF8StringPointable.TYPE_TRAITS, UTF8StringPointable.TYPE_TRAITS,
+ UTF8StringPointable.TYPE_TRAITS, UTF8StringPointable.TYPE_TRAITS };
public static final IBinaryComparatorFactory[] primaryComparatorFactories =
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
@@ -65,18 +62,16 @@
public static final int[] primaryBloomFilterKeyFields = new int[] { 0 };
- public static final RecordDescriptor primaryRecDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ public static final RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- public static final RecordDescriptor primaryAndFilterRecDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ public static final RecordDescriptor primaryAndFilterRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
// field, type and key declarations for secondary indexes
@@ -94,11 +89,10 @@
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
- public static final RecordDescriptor secondaryRecDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
- public static final RecordDescriptor secondaryWithFilterRecDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ public static final RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
+ public static final RecordDescriptor secondaryWithFilterRecDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer() });
}
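DataSetConstants concentrates the array-initializer rule: elements now fill each line up to the 120-column limit before wrapping, and the opening '{' stays glued to the 'new Type[]' rather than starting a vertical one-element-per-line list. A compilable sketch with plain strings standing in for the serializer and parser-factory instances:

public class ArrayFillExample {
    // Old: one element or short group per line. New: fill to the column limit.
    static final String[] PARSER_FACTORIES = new String[] { "UTF8StringParserFactory.INSTANCE",
            "UTF8StringParserFactory.INSTANCE", "UTF8StringParserFactory.INSTANCE",
            "UTF8StringParserFactory.INSTANCE" };

    public static void main(String[] args) {
        System.out.println(PARSER_FACTORIES.length); // prints 4
    }
}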
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java
index d230f38..5b091a2 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java
@@ -39,7 +39,6 @@
public static final boolean IS_POINT_MBR = false;
public static final boolean DURABLE = true;
-
public LSMRTreeWithAntiMatterTuplesOperatorTestHelper(IOManager ioManager) {
super(ioManager);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
index 7a675bc..1bb58b8 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
@@ -48,8 +48,8 @@
public class SerializationDeserializationTest {
private static final Logger LOGGER = LogManager.getLogger();
- private static final String DBLP_FILE = "data" + File.separator + "device1" + File.separator + "data"
- + File.separator + "dblp.txt";
+ private static final String DBLP_FILE =
+ "data" + File.separator + "device1" + File.separator + "data" + File.separator + "dblp.txt";
private static class SerDeserRunner {
private final IHyracksTaskContext ctx;
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
index ae27ac9..0931501 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
@@ -106,7 +106,7 @@
ncConfig1.setClusterListenAddress("127.0.0.1");
ncConfig1.setDataListenAddress("127.0.0.1");
ncConfig1.setResultListenAddress("127.0.0.1");
- ncConfig1.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data", "device0") });
+ ncConfig1.setIODevices(new String[] { joinPath(System.getProperty("user.dir"), "target", "data", "device0") });
nc1 = new NodeControllerService(ncConfig1);
nc1.start();
@@ -116,7 +116,7 @@
ncConfig2.setClusterListenAddress("127.0.0.1");
ncConfig2.setDataListenAddress("127.0.0.1");
ncConfig2.setResultListenAddress("127.0.0.1");
- ncConfig2.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data", "device1") });
+ ncConfig2.setIODevices(new String[] { joinPath(System.getProperty("user.dir"), "target", "data", "device1") });
nc2 = new NodeControllerService(ncConfig2);
nc2.start();
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
index 7100895..58da8a2 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
@@ -245,8 +245,7 @@
@Override
public JobSubmissionStatus allocate(JobSpecification job) throws HyracksException {
return maxRAM > job.getRequiredClusterCapacity().getAggregatedMemoryByteSize()
- ? JobSubmissionStatus.EXECUTE
- : JobSubmissionStatus.QUEUE;
+ ? JobSubmissionStatus.EXECUTE : JobSubmissionStatus.QUEUE;
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
index 080746c..752c643 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
@@ -73,9 +73,8 @@
*/
public class AggregationTest extends AbstractIntegrationTest {
- final IFileSplitProvider splitProvider = new ConstantFileSplitProvider(
- new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.002" + File.separator
- + "lineitem.tbl") });
+ final IFileSplitProvider splitProvider = new ConstantFileSplitProvider(new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.002" + File.separator + "lineitem.tbl") });
final RecordDescriptor desc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
@@ -110,8 +109,8 @@
public void singleKeySumPreClusterGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -150,8 +149,8 @@
public void singleKeySumExtGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -164,18 +163,19 @@
int tableSize = 8;
long fileSize = frameLimits * spec.getFrameSize();
- ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize,
- keyFields, frameLimits,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory(),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
- new FloatSumFieldAggregatorFactory(5, false) }),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
- new FloatSumFieldAggregatorFactory(3, false) }),
- outputRec, outputRec, new HashSpillableTableFactory(
- new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
+ ExternalGroupOperatorDescriptor grouper =
+ new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize, keyFields, frameLimits,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory(),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
+ new FloatSumFieldAggregatorFactory(5, false) }),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
+ new FloatSumFieldAggregatorFactory(3, false) }),
+ outputRec, outputRec, new HashSpillableTableFactory(
+ new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
@@ -199,8 +199,8 @@
public void singleKeyAvgPreClusterGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -239,8 +239,8 @@
public void singleKeyAvgExtGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -253,18 +253,19 @@
int tableSize = 8;
long fileSize = frameLimits * spec.getFrameSize();
- ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize,
- keyFields, frameLimits,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory(),
- new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
- new CountFieldAggregatorFactory(false), new AvgFieldGroupAggregatorFactory(1, false) }),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
- new AvgFieldMergeAggregatorFactory(3, false) }),
- outputRec, outputRec, new HashSpillableTableFactory(
- new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
+ ExternalGroupOperatorDescriptor grouper =
+ new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize, keyFields, frameLimits,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory(),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new CountFieldAggregatorFactory(false),
+ new AvgFieldGroupAggregatorFactory(1, false) }),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
+ new AvgFieldMergeAggregatorFactory(3, false) }),
+ outputRec, outputRec, new HashSpillableTableFactory(
+ new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
@@ -288,13 +289,13 @@
public void singleKeyMinMaxStringPreClusterGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
- RecordDescriptor outputRec = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor outputRec =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() });
int[] keyFields = new int[] { 0 };
@@ -328,13 +329,13 @@
public void singleKeyMinMaxStringExtGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
- RecordDescriptor outputRec = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor outputRec =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() });
int[] keyFields = new int[] { 0 };
@@ -342,18 +343,19 @@
int tableSize = 8;
long fileSize = frameLimits * spec.getFrameSize();
- ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize,
- keyFields, frameLimits,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory(),
- new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
- new MinMaxStringFieldAggregatorFactory(15, true, true) }),
- new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
- new MinMaxStringFieldAggregatorFactory(2, true, true) }),
- outputRec, outputRec, new HashSpillableTableFactory(
- new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
+ ExternalGroupOperatorDescriptor grouper =
+ new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize, keyFields, frameLimits,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory(),
+ new MultiFieldsAggregatorFactory(
+ new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
+ new MinMaxStringFieldAggregatorFactory(15, true, true) }),
+ new MultiFieldsAggregatorFactory(
+ new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
+ new MinMaxStringFieldAggregatorFactory(2, true, true) }),
+ outputRec, outputRec, new HashSpillableTableFactory(
+ new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
@@ -377,8 +379,8 @@
public void multiKeySumPreClusterGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -419,8 +421,8 @@
public void multiKeySumExtGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -469,13 +471,13 @@
public void multiKeyAvgPreClusterGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
- RecordDescriptor outputRec = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor outputRec =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
IntegerSerializerDeserializer.INSTANCE, FloatSerializerDeserializer.INSTANCE });
@@ -513,13 +515,13 @@
public void multiKeyAvgExtGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
- RecordDescriptor outputRec = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor outputRec =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
IntegerSerializerDeserializer.INSTANCE, FloatSerializerDeserializer.INSTANCE });
@@ -566,8 +568,8 @@
public void multiKeyMinMaxStringPreClusterGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -609,8 +611,8 @@
public void multiKeyMinMaxStringExtGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
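The AggregationTest hunks above are the clearest illustration of why the template prefers breaking after '=': once the ExternalGroupOperatorDescriptor construction moves into its own continuation block, all of its nested factory arguments indent from one uniform margin instead of hanging off the assignment target. A reduced, runnable analogue; Part and part() are hypothetical stand-ins for the operator and factory types, not Hyracks APIs:

import java.util.Arrays;
import java.util.List;

public class NestedCallWrapExample {
    static final class Part {
        final String name;
        final List<Part> children;

        Part(String name, List<Part> children) {
            this.name = name;
            this.children = children;
        }
    }

    static Part part(String name, Part... children) {
        return new Part(name, Arrays.asList(children));
    }

    public static void main(String[] args) {
        // Breaking after '=' lets every nested argument hang from one margin,
        // mirroring how the reformatted ExternalGroupOperatorDescriptor calls read.
        Part grouper =
                part("ExternalGroupOperatorDescriptor", part("comparatorFactories"),
                        part("MultiFieldsAggregatorFactory", part("IntSumFieldAggregatorFactory"),
                                part("FloatSumFieldAggregatorFactory")),
                        part("HashSpillableTableFactory"));
        System.out.println(grouper.name + " takes " + grouper.children.size() + " argument groups");
    }
}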
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CancelJobTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CancelJobTest.java
index 7eba9e7..ec3b8f1 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CancelJobTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CancelJobTest.java
@@ -230,8 +230,8 @@
FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(ASTERIX_IDS[0],
"data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor recordDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor recordDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
index c05b504..c28a5aa 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
@@ -64,14 +64,12 @@
FileSplit[] splits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt") };
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
- RecordDescriptor desc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
- spec,
- splitProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(
+ new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','), desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
InMemorySortOperatorDescriptor sorter = new InMemorySortOperatorDescriptor(spec, new int[] { 0 },
@@ -84,11 +82,13 @@
PreclusteredGroupOperatorDescriptor group = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 0 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc2);
+ new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+ desc2);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group, NC2_ID);
InMemorySortOperatorDescriptor sorter2 = new InMemorySortOperatorDescriptor(spec, new int[] { 1 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, desc2);
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
+ desc2);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter2, NC2_ID);
RecordDescriptor desc3 = new RecordDescriptor(new ISerializerDeserializer[] {
@@ -96,7 +96,8 @@
PreclusteredGroupOperatorDescriptor group2 = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 1 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc3);
+ new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+ desc3);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group2, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -106,18 +107,16 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID);
IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn1, csvScanner, 0, sorter, 0);
IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
spec.connect(conn2, sorter, 0, group, 0);
IConnectorDescriptor conn3 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 1 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 1 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn3, group, 0, sorter2, 0);
IConnectorDescriptor conn4 = new OneToOneConnectorDescriptor(spec);
@@ -136,14 +135,12 @@
FileSplit[] splits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt") };
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
- RecordDescriptor desc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
- spec,
- splitProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(
+ new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','), desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
InMemorySortOperatorDescriptor sorter = new InMemorySortOperatorDescriptor(spec, new int[] { 0 },
@@ -156,11 +153,13 @@
PreclusteredGroupOperatorDescriptor group = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 0 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc2);
+ new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+ desc2);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group, NC1_ID, NC2_ID, NC1_ID, NC2_ID);
InMemorySortOperatorDescriptor sorter2 = new InMemorySortOperatorDescriptor(spec, new int[] { 1 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, desc2);
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
+ desc2);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter2, NC1_ID, NC2_ID);
RecordDescriptor desc3 = new RecordDescriptor(new ISerializerDeserializer[] {
@@ -168,7 +167,8 @@
PreclusteredGroupOperatorDescriptor group2 = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 1 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc3);
+ new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+ desc3);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group2, NC1_ID, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -179,18 +179,16 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn1, csvScanner, 0, sorter, 0);
IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
spec.connect(conn2, sorter, 0, group, 0);
IConnectorDescriptor conn3 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 1 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 1 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn3, group, 0, sorter2, 0);
IConnectorDescriptor conn4 = new OneToOneConnectorDescriptor(spec);
@@ -209,14 +207,12 @@
FileSplit[] splits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt") };
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
- RecordDescriptor desc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
- spec,
- splitProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(
+ new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','), desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 3, new int[] { 0 },
@@ -229,11 +225,13 @@
PreclusteredGroupOperatorDescriptor group = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 0 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc2);
+ new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+ desc2);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group, NC1_ID, NC2_ID, NC1_ID, NC2_ID);
InMemorySortOperatorDescriptor sorter2 = new InMemorySortOperatorDescriptor(spec, new int[] { 1 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, desc2);
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
+ desc2);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter2, NC1_ID, NC2_ID);
RecordDescriptor desc3 = new RecordDescriptor(new ISerializerDeserializer[] {
@@ -241,7 +239,8 @@
PreclusteredGroupOperatorDescriptor group2 = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 1 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc3);
+ new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+ desc3);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group2, NC1_ID, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -252,18 +251,16 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn1, csvScanner, 0, sorter, 0);
IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
spec.connect(conn2, sorter, 0, group, 0);
IConnectorDescriptor conn3 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 1 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 1 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn3, group, 0, sorter2, 0);
IConnectorDescriptor conn4 = new OneToOneConnectorDescriptor(spec);
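The test reformatted above builds the classic count-of-counts pipeline: scan words.txt, hash-partition on the word (field 0), sort, pre-clustered group to count per word, then repartition on the count (field 1) and group again. A sketch of the wiring step, taken from the hunks above (operators assumed constructed as shown there):

    // Hash-partition tuples on field 0 so equal words land on the same sorter partition.
    IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
            new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
                    PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
    spec.connect(conn1, csvScanner, 0, sorter, 0);
    // Sorter and grouper run on the same partition, so a one-to-one connector suffices.
    spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, group, 0);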
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java
index 553c5b5..40b6b27 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java
@@ -134,8 +134,10 @@
verify(nc2, Mockito.timeout(TIME_THRESHOLD).times(2)).checkForDuplicateDeployedJobSpec(any());
//confirm that both jobs are distributed
- Assert.assertTrue(nc1.getActivityClusterGraph(distributedId1) != null && nc2.getActivityClusterGraph(distributedId1) != null);
- Assert.assertTrue(nc1.getActivityClusterGraph(distributedId2) != null && nc2.getActivityClusterGraph(distributedId2) != null);
+ Assert.assertTrue(nc1.getActivityClusterGraph(distributedId1) != null
+ && nc2.getActivityClusterGraph(distributedId1) != null);
+ Assert.assertTrue(nc1.getActivityClusterGraph(distributedId2) != null
+ && nc2.getActivityClusterGraph(distributedId2) != null);
Assert.assertTrue(cc.getDeployedJobSpecStore().getDeployedJobSpecDescriptor(distributedId1) != null);
Assert.assertTrue(cc.getDeployedJobSpecStore().getDeployedJobSpecDescriptor(distributedId2) != null);
@@ -157,7 +159,8 @@
verify(nc2, Mockito.timeout(TIME_THRESHOLD).times(1)).removeActivityClusterGraph(any());
//confirm the first job is destroyed
- Assert.assertTrue(nc1.getActivityClusterGraph(distributedId1) == null && nc2.getActivityClusterGraph(distributedId1) == null);
+ Assert.assertTrue(nc1.getActivityClusterGraph(distributedId1) == null
+ && nc2.getActivityClusterGraph(distributedId1) == null);
cc.getDeployedJobSpecStore().checkForExistingDeployedJobSpecDescriptor(distributedId1);
//run the second job
@@ -187,7 +190,8 @@
verify(nc2, Mockito.timeout(TIME_THRESHOLD).times(2)).removeActivityClusterGraph(any());
//confirm the second job is destroyed
- Assert.assertTrue(nc1.getActivityClusterGraph(distributedId2) == null && nc2.getActivityClusterGraph(distributedId2) == null);
+ Assert.assertTrue(nc1.getActivityClusterGraph(distributedId2) == null
+ && nc2.getActivityClusterGraph(distributedId2) == null);
cc.getDeployedJobSpecStore().checkForExistingDeployedJobSpecDescriptor(distributedId2);
//run the second job 100 times in parallel
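These assertion hunks show the template's other main rule: a boolean expression that overflows now breaks before the operator, so continuation lines start with `&&`. A sketch of the same rule applied to a local variable (the variable name here is hypothetical):

    boolean distributedOnBoth = nc1.getActivityClusterGraph(distributedId1) != null
            && nc2.getActivityClusterGraph(distributedId1) != null;
    Assert.assertTrue(distributedOnBoth);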
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
index b693b09..9e795bf 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
@@ -81,11 +81,13 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
int outputLimit = 5; // larger than the total record numbers.
- TopKSorterOperatorDescriptor sorter = new TopKSorterOperatorDescriptor(spec, 4, outputLimit, new int[] { 1, 0 },
- (INormalizedKeyComputerFactory) null,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- ordersDesc);
+ TopKSorterOperatorDescriptor sorter =
+ new TopKSorterOperatorDescriptor(spec, 4, outputLimit, new int[] { 1, 0 },
+ (INormalizedKeyComputerFactory) null,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
@@ -145,11 +147,13 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
int outputLimit = 20;
- TopKSorterOperatorDescriptor sorter = new TopKSorterOperatorDescriptor(spec, 4, outputLimit, new int[] { 1, 0 },
- (INormalizedKeyComputerFactory) null,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- ordersDesc);
+ TopKSorterOperatorDescriptor sorter =
+ new TopKSorterOperatorDescriptor(spec, 4, outputLimit, new int[] { 1, 0 },
+ (INormalizedKeyComputerFactory) null,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
LimitOperatorDescriptor filter = new LimitOperatorDescriptor(spec, ordersDesc, outputLimit);
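In both hunks the top-K sorter is rewrapped, not changed: it sorts on fields { 1, 0 } and emits at most outputLimit records, which the downstream LimitOperatorDescriptor then enforces again. A sketch with the parameter roles spelled out (meanings inferred from the test's usage, not from the descriptor's javadoc):

    TopKSorterOperatorDescriptor sorter =
            new TopKSorterOperatorDescriptor(spec, 4 /* frame budget */, outputLimit /* top K */,
                    new int[] { 1, 0 } /* sort fields */, (INormalizedKeyComputerFactory) null,
                    new IBinaryComparatorFactory[] {
                            PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
                            PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
                    ordersDesc);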
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
index 67845c0..49dee84 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
@@ -125,18 +125,20 @@
int[] keyFields = new int[] { 0 };
int tableSize = 8;
- ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize,
- keyFields, fileSize / spec.getFrameSize() + 1,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory(),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
- new FloatSumFieldAggregatorFactory(5, false) }),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
- new FloatSumFieldAggregatorFactory(3, false) }),
- outputRec, outputRec, new HashSpillableTableFactory(
- new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
+ ExternalGroupOperatorDescriptor grouper =
+ new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize, keyFields,
+ fileSize / spec.getFrameSize() + 1,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory(),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
+ new FloatSumFieldAggregatorFactory(5, false) }),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
+ new FloatSumFieldAggregatorFactory(3, false) }),
+ outputRec, outputRec, new HashSpillableTableFactory(
+ new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, "asterix-005", "asterix-006");
@@ -190,18 +192,20 @@
int[] keyFields = new int[] { 0 };
int tableSize = 8;
- ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize,
- keyFields, fileSize / spec.getFrameSize() + 1,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory(),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
- new FloatSumFieldAggregatorFactory(5, false) }),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
- new FloatSumFieldAggregatorFactory(3, false) }),
- outputRec, outputRec, new HashSpillableTableFactory(
- new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
+ ExternalGroupOperatorDescriptor grouper =
+ new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize, keyFields,
+ fileSize / spec.getFrameSize() + 1,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory(),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
+ new FloatSumFieldAggregatorFactory(5, false) }),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
+ new FloatSumFieldAggregatorFactory(3, false) }),
+ outputRec, outputRec, new HashSpillableTableFactory(
+ new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, "asterix-005", "asterix-006");
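One reading of the grouper being rewrapped here: ExternalGroupOperatorDescriptor takes two aggregator factories because aggregation runs in two phases. The first factory works on the raw input layout, the second on the already-aggregated (spilled) layout, which is why the field indices differ between them. Annotated excerpt from the hunk above:

    // Phase 1: partial aggregation against the input record (fields 1, 3, 5).
    new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
            new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
            new FloatSumFieldAggregatorFactory(5, false) }),
    // Phase 2: merging partials, whose layout packs the sums into fields 1, 2, 3.
    new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
            new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
            new FloatSumFieldAggregatorFactory(3, false) })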
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java
index d7d4219..09629b2 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java
@@ -79,11 +79,11 @@
DelimitedDataTupleParserFactory stringParser = new DelimitedDataTupleParserFactory(
new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, '\u0000');
- RecordDescriptor stringRec = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(), });
+ RecordDescriptor stringRec =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(), });
- FileScanOperatorDescriptor scanOp = new FileScanOperatorDescriptor(spec, new ConstantFileSplitProvider(
- inputSplits), stringParser, stringRec);
+ FileScanOperatorDescriptor scanOp = new FileScanOperatorDescriptor(spec,
+ new ConstantFileSplitProvider(inputSplits), stringParser, stringRec);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanOp, locations);
ReplicateOperatorDescriptor replicateOp = new ReplicateOperatorDescriptor(spec, stringRec, outputArity);
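ReplicateOperatorDescriptor fans one input stream out to outputArity identical outputs, one per output index. A minimal wiring sketch (the sinks array is hypothetical; the constructor call is the one from the hunk above):

    ReplicateOperatorDescriptor replicateOp = new ReplicateOperatorDescriptor(spec, stringRec, outputArity);
    for (int i = 0; i < outputArity; i++) {
        // Output i of the replicator feeds input 0 of the i-th sink.
        spec.connect(new OneToOneConnectorDescriptor(spec), replicateOp, i, sinks[i], 0);
    }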
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
index 06d7b04..75ba33f 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
@@ -54,19 +54,16 @@
public void scanPrint01() throws Exception {
JobSpecification spec = new JobSpecification();
- IFileSplitProvider splitProvider = new ConstantFileSplitProvider(new FileSplit[] {
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt"),
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "nc1" + File.separator + "words.txt") });
+ IFileSplitProvider splitProvider = new ConstantFileSplitProvider(
+ new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt"),
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "nc1" + File.separator + "words.txt") });
- RecordDescriptor desc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
- spec,
- splitProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE },
- ','),
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(
+ new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','), desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID, NC1_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -87,22 +84,23 @@
public void scanPrint02() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -113,9 +111,8 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID);
IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn1, ordScanner, 0, printer, 0);
spec.addRoot(printer);
@@ -126,22 +123,23 @@
public void scanPrint03() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
IntegerParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -152,9 +150,8 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID);
IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(IntegerPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(IntegerPointable.FACTORY) }));
spec.connect(conn1, ordScanner, 0, printer, 0);
spec.addRoot(printer);
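Worth noting while reviewing these two hunks: the hash factory inside the partitioning connector must match the type of the partitioning field, so scanPrint02 (all-string schema) hashes with UTF8StringPointable.FACTORY while scanPrint03 (integer key in field 0) switches to IntegerPointable.FACTORY. A condensed sketch of the difference:

    // scanPrint02: field 0 is a UTF-8 string.
    PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY)
    // scanPrint03: field 0 is an integer.
    PointableBinaryHashFunctionFactory.of(IntegerPointable.FACTORY)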
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
index df9c0d7..315b74c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
@@ -56,24 +56,25 @@
JobSpecification spec = new JobSpecification();
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
InMemorySortOperatorDescriptor sorter = new InMemorySortOperatorDescriptor(spec, new int[] { 1 },
@@ -90,13 +91,13 @@
spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
- spec.connect(
- new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(
- new int[] { 1 }, new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }), new int[] { 1 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
- .of(UTF8StringPointable.FACTORY) }, new UTF8StringNormalizedKeyComputerFactory()),
- sorter, 0, printer, 0);
+ spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec,
+ new FieldHashPartitionComputerFactory(new int[] { 1 },
+ new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
+ new int[] { 1 },
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
runTest(spec);
}
@@ -106,29 +107,33 @@
JobSpecification spec = new JobSpecification();
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 4, new int[] { 1, 0 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
+ ExternalSortOperatorDescriptor sorter =
+ new ExternalSortOperatorDescriptor(spec, 4, new int[] { 1, 0 },
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -140,15 +145,14 @@
spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
- spec.connect(
- new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(new int[] {
- 1, 0 }, new IBinaryHashFunctionFactory[] {
- PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }), new int[] { 1, 0 },
- new IBinaryComparatorFactory[] {
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
+ spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(
+ new int[] { 1, 0 },
+ new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
+ new int[] { 1, 0 },
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
runTest(spec);
}
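The connector being rewrapped in this file is the sort-merge exchange: MToNPartitioningMergingConnectorDescriptor both repartitions and merges already-sorted streams, so it needs the hash partitioner plus the merge keys, comparators, and normalized-key computer. A sketch of the single-key form from the first hunk (argument roles annotated; identifiers as in the test):

    spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec,
            new FieldHashPartitionComputerFactory(new int[] { 1 }, // partition on field 1
                    new IBinaryHashFunctionFactory[] {
                            PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
            new int[] { 1 }, // merge key
            new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
            new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);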
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java
index d6f39ad..289f8ae 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java
@@ -59,23 +59,23 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- static RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ static RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- static RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ static RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
static IValueParserFactory[] custValueParserFactories = new IValueParserFactory[custDesc.getFieldCount()];
static IValueParserFactory[] orderValueParserFactories = new IValueParserFactory[ordersDesc.getFieldCount()];
@@ -86,9 +86,8 @@
}
private IOperatorDescriptor getPrinter(JobSpecification spec, String path) {
- IFileSplitProvider outputSplitProvider = new ConstantFileSplitProvider(
- new FileSplit[] {
- new ManagedFileSplit(NC1_ID, path) });
+ IFileSplitProvider outputSplitProvider =
+ new ConstantFileSplitProvider(new FileSplit[] { new ManagedFileSplit(NC1_ID, path) });
return DEBUG ? new PlainFileWriterOperatorDescriptor(spec, outputSplitProvider, "|")
: new NullSinkOperatorDescriptor(spec);
@@ -97,12 +96,12 @@
@Test
public void customerOrderCIDHybridHashJoin_Case1() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer4.tbl") };
+ FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer4.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders4.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders4.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
@@ -117,8 +116,8 @@
1.2, new int[] { 0 }, new int[] { 1 },
new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- custOrderJoinDesc, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
+ custOrderJoinDesc,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
null);
@@ -146,12 +145,12 @@
public void customerOrderCIDHybridHashJoin_Case2() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer3.tbl") };
+ FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer3.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders4.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders4.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
@@ -167,8 +166,8 @@
1.2, new int[] { 0 }, new int[] { 1 },
new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- custOrderJoinDesc, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
+ custOrderJoinDesc,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
null);
@@ -197,12 +196,12 @@
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer3.tbl") };
+ FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer3.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders1.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders1.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
@@ -218,8 +217,8 @@
1.2, new int[] { 0 }, new int[] { 1 },
new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- custOrderJoinDesc, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
+ custOrderJoinDesc,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
null);
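Across all three join cases the reformat pulls custOrderJoinDesc onto its own line, which makes the pair of JoinComparatorFactory arguments easier to read: they compare the same UTF-8 key in both probe/build directions, with the index pairs (0, 1) and (1, 0) naming the key field on either side. Annotated excerpt:

    // Key is field 0 on one input and field 1 on the other, compared both ways.
    new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
    new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0)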
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
index 2c055c2..816f3fa 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
@@ -72,8 +72,8 @@
public void customerOrderCIDJoin() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl") };
+ FileSplit[] custSplits = new FileSplit[] {
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -81,18 +81,18 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -111,14 +111,14 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 1 },
@@ -152,8 +152,8 @@
public void customerOrderCIDHybridHashJoin() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl") };
+ FileSplit[] custSplits = new FileSplit[] {
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -161,51 +161,48 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
- HybridHashJoinOperatorDescriptor join = new HybridHashJoinOperatorDescriptor(
- spec,
- 32,
- 20,
- 200,
- 1.2,
- new int[] { 1 },
- new int[] { 0 },
+ HybridHashJoinOperatorDescriptor join = new HybridHashJoinOperatorDescriptor(spec, 32, 20, 200, 1.2,
+ new int[] { 1 }, new int[] { 0 },
new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
custOrderJoinDesc, null, false, null);
@@ -235,8 +232,8 @@
public void customerOrderCIDInMemoryHashLeftOuterJoin() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl") };
+ FileSplit[] custSplits = new FileSplit[] {
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -244,18 +241,18 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -274,14 +271,14 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[ordersDesc.getFieldCount()];
@@ -320,8 +317,8 @@
public void customerOrderCIDHybridHashLeftOuterJoin() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl") };
+ FileSplit[] custSplits = new FileSplit[] {
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -329,18 +326,18 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -359,14 +356,14 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[ordersDesc.getFieldCount()];
@@ -406,10 +403,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -418,20 +415,20 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -450,14 +447,14 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 1 },
@@ -475,15 +472,13 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 1 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 1 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(ordJoinConn, ordScanner, 0, join, 0);
IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(custJoinConn, custScanner, 0, join, 1);
IConnectorDescriptor joinPrinterConn = new MToNBroadcastConnectorDescriptor(spec);
@@ -498,10 +493,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -510,20 +505,20 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -542,14 +537,14 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
HybridHashJoinOperatorDescriptor join = new HybridHashJoinOperatorDescriptor(spec, 5, 20, 100, 1.2,
@@ -588,10 +583,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -600,20 +595,20 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -632,20 +627,19 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 1 },
new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) },
+ new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
custOrderJoinDesc, 128, null, 128);
PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 2);
@@ -679,10 +673,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -691,20 +685,20 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -723,14 +717,14 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
MaterializingOperatorDescriptor ordMat = new MaterializingOperatorDescriptor(spec, ordersDesc);
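Note on the hunks above: every test in TPCHCustomerOrderHashJoinTest re-declares the same all-UTF8 record descriptors (9 fields for the orders table, 8 for customer, 17 for the join output), and these long initializer lists are exactly what the format template keeps rewrapping. A hypothetical helper like the sketch below would build the same descriptors without the repetition; the class and method are illustrative only (not part of this change), and the import paths are assumed from the standard Hyracks module layout.

import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;

final class Utf8Descriptors {
    private Utf8Descriptors() {
    }

    // Builds a RecordDescriptor whose fields are all UTF-8 strings, e.g.
    // utf8Record(9) for ordersDesc or utf8Record(17) for custOrderJoinDesc.
    static RecordDescriptor utf8Record(int fieldCount) {
        ISerializerDeserializer[] fields = new ISerializerDeserializer[fieldCount];
        for (int i = 0; i < fieldCount; i++) {
            fields[i] = new UTF8StringSerializerDeserializer();
        }
        return new RecordDescriptor(fields);
    }
}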
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
index dc5d0bc..c2b3263 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
@@ -103,8 +103,8 @@
int fEnd1 = accessor1.getFieldEndOffset(tIndex1, field1);
int fLen1 = fEnd1 - fStart1;
- int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0, accessor1
- .getBuffer().array(), fStart1 + fStartOffset1, fLen1);
+ int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0,
+ accessor1.getBuffer().array(), fStart1 + fStartOffset1, fLen1);
if (c != 0) {
return c;
}
@@ -127,8 +127,8 @@
public void customerOrderCIDJoin() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl") };
+ FileSplit[] custSplits = new FileSplit[] {
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -136,46 +136,49 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
- NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 4, false,
- null);
+ NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
+ custOrderJoinDesc, 4, false, null);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -203,10 +206,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -215,48 +218,51 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
- NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 5, false,
- null);
+ NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
+ custOrderJoinDesc, 5, false, null);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -284,10 +290,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -296,48 +302,51 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
- NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 6, false,
- null);
+ NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
+ custOrderJoinDesc, 6, false, null);
PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 2);
ResultSetId rsId = new ResultSetId(1);
@@ -365,10 +374,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -377,43 +386,46 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[ordersDesc.getFieldCount()];
@@ -421,9 +433,9 @@
nonMatchWriterFactories[j] = NoopMissingWriterFactory.INSTANCE;
}
- NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 5, true,
- nonMatchWriterFactories);
+ NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
+ custOrderJoinDesc, 5, true, nonMatchWriterFactories);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
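Note on the nested-loop join hunks above: the constructor call that keeps being rewrapped has the same shape in all the tests in this file. The annotated sketch below spells out the argument roles as they can be read off these call sites; the role comments are inferred from the surrounding code, not taken from the Hyracks javadoc.

NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(
        spec, // the JobSpecification being assembled
        // compares field 1 of one input against field 0 of the other
        // (orders O_CUSTKEY vs. customer C_CUSTKEY) as UTF-8 strings
        new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
        custOrderJoinDesc, // 17-field descriptor of the joined tuples
        5, // per-test memory budget (4, 5, or 6 across the hunks above)
        true, // left-outer variant; false in the inner-join tests
        nonMatchWriterFactories); // null-fill writers for non-matches; null when not outer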
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/UnionTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/UnionTest.java
index e4d6398..81a71eb 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/UnionTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/UnionTest.java
@@ -51,25 +51,21 @@
public static JobSpecification createUnionJobSpec() throws Exception {
JobSpecification spec = new JobSpecification();
- IFileSplitProvider splitProvider = new ConstantFileSplitProvider(new FileSplit[] {
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt"),
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "nc1" + File.separator + "words.txt") });
+ IFileSplitProvider splitProvider = new ConstantFileSplitProvider(
+ new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt"),
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "nc1" + File.separator + "words.txt") });
- RecordDescriptor desc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- FileScanOperatorDescriptor csvScanner01 = new FileScanOperatorDescriptor(
- spec,
- splitProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
- desc);
+ FileScanOperatorDescriptor csvScanner01 =
+ new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(
+ new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','), desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner01, NC2_ID, NC1_ID);
- FileScanOperatorDescriptor csvScanner02 = new FileScanOperatorDescriptor(
- spec,
- splitProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
- desc);
+ FileScanOperatorDescriptor csvScanner02 =
+ new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(
+ new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','), desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner02, NC2_ID, NC1_ID);
UnionAllOperatorDescriptor unionAll = new UnionAllOperatorDescriptor(spec, 2, desc);
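Note on the UnionTest hunk above: UnionAllOperatorDescriptor takes the number of inputs (2 here) and the shared output descriptor (desc), and each scanner then attaches to a distinct input port. The hunk cuts off before the wiring, so the sketch below is an assumption consistent with how spec.connect and OneToOneConnectorDescriptor are used elsewhere in this change.

spec.connect(new OneToOneConnectorDescriptor(spec), csvScanner01, 0, unionAll, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), csvScanner02, 0, unionAll, 1);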
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
index 29e1d6e..9761f4d 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
@@ -56,12 +56,12 @@
new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
@Test
public void sortNormalMergeTest() throws Exception {
@@ -84,34 +84,37 @@
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
spec.setFrameSize(frameSize);
- ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, frameLimit, new int[] { 1, 0 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
+ ExternalSortOperatorDescriptor sorter =
+ new ExternalSortOperatorDescriptor(spec, frameLimit, new int[] { 1, 0 },
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
String path = getClass().getSimpleName() + aInteger.getAndIncrement() + ".tmp";
- IFileSplitProvider outputSplitProvider = new ConstantFileSplitProvider(
- new FileSplit[] { new ManagedFileSplit(NC1_ID, path) });
+ IFileSplitProvider outputSplitProvider =
+ new ConstantFileSplitProvider(new FileSplit[] { new ManagedFileSplit(NC1_ID, path) });
IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outputSplitProvider, "|");
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
- spec.connect(
- new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(new int[] {
- 1, 0 }, new IBinaryHashFunctionFactory[] {
- PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }), new int[] { 1, 0 },
- new IBinaryComparatorFactory[] {
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
+ spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(
+ new int[] { 1, 0 },
+ new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
+ new int[] { 1, 0 },
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
spec.addRoot(printer);
runTest(spec);
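Note on the VSizeFrameSortMergeTest hunk above: after rewrapping, the merging connector is the densest call in the file. The same call is restated below with the argument roles spelled out in comments; the comments are inferred from the call itself and from the sorter it pairs with.

// The sort-merge wiring from the hunk above, annotated:
spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec,
        new FieldHashPartitionComputerFactory(new int[] { 1, 0 }, // partition by the sort fields
                new IBinaryHashFunctionFactory[] {
                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
        new int[] { 1, 0 }, // merge keys: must match the ExternalSortOperatorDescriptor's keys
        new IBinaryComparatorFactory[] {
                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
        new UTF8StringNormalizedKeyComputerFactory()), // enables cheap key-prefix comparisons
        sorter, 0, printer, 0);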
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/ErrorReportingTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/ErrorReportingTest.java
index ef9e4b6..4d3215d 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/ErrorReportingTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/ErrorReportingTest.java
@@ -139,7 +139,7 @@
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new IOperatorNodePushable() {
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/SuperActivityRewritingTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/SuperActivityRewritingTest.java
index 8b0b8a0..25c9d5c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/SuperActivityRewritingTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/SuperActivityRewritingTest.java
@@ -123,7 +123,7 @@
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new IOperatorNodePushable() {
private CountDownLatch allOpenedSignal = new CountDownLatch(3);
private Set<Long> threads = new HashSet<>();
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
index b0c210f..b55b64e 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
@@ -58,50 +58,39 @@
public abstract class AbstractExternalGroupbyTest {
- ISerializerDeserializer[] inFields = new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE,
- new UTF8StringSerializerDeserializer(),
- };
+ ISerializerDeserializer[] inFields = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ new UTF8StringSerializerDeserializer(), };
- ISerializerDeserializer[] aggrFields = new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), // key
- IntegerSerializerDeserializer.INSTANCE, // sum
- IntegerSerializerDeserializer.INSTANCE, // count
- FloatSerializerDeserializer.INSTANCE, // avg
+ ISerializerDeserializer[] aggrFields = new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(), // key
+ IntegerSerializerDeserializer.INSTANCE, // sum
+ IntegerSerializerDeserializer.INSTANCE, // count
+ FloatSerializerDeserializer.INSTANCE, // avg
};
RecordDescriptor inRecordDesc = new RecordDescriptor(inFields);
RecordDescriptor outputRec = new RecordDescriptor(aggrFields);
- IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[] {
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
+ IBinaryComparatorFactory[] comparatorFactories =
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
INormalizedKeyComputerFactory normalizedKeyComputerFactory = new UTF8StringNormalizedKeyComputerFactory();
IAggregatorDescriptorFactory partialAggrInPlace = new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(0, false),
- new CountFieldAggregatorFactory(false),
- new AvgFieldGroupAggregatorFactory(0, false) });
+ new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(0, false),
+ new CountFieldAggregatorFactory(false), new AvgFieldGroupAggregatorFactory(0, false) });
IAggregatorDescriptorFactory finalAggrInPlace = new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false),
- new IntSumFieldAggregatorFactory(2, false),
- new AvgFieldMergeAggregatorFactory(3, false) });
+ new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
+ new IntSumFieldAggregatorFactory(2, false), new AvgFieldMergeAggregatorFactory(3, false) });
IAggregatorDescriptorFactory partialAggrInState = new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(0, true),
- new CountFieldAggregatorFactory(true),
- new AvgFieldGroupAggregatorFactory(0, true) });
+ new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(0, true),
+ new CountFieldAggregatorFactory(true), new AvgFieldGroupAggregatorFactory(0, true) });
IAggregatorDescriptorFactory finalAggrInState = new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, true),
- new IntSumFieldAggregatorFactory(2, true),
- new AvgFieldMergeAggregatorFactory(3, true) });
+ new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, true),
+ new IntSumFieldAggregatorFactory(2, true), new AvgFieldMergeAggregatorFactory(3, true) });
int[] keyFields = new int[] { 1 };
int[] keyFieldsAfterPartial = new int[] { 0 };
@@ -213,17 +202,15 @@
protected abstract IOperatorNodePushable getMerger();
- private void testBuildAndMerge(int tableSize, int numFrames, int frameSize, int minDataSize,
- int minRecordSize, int maxRecordSize,
- Map<Integer, String> specialData)
- throws HyracksDataException {
+ private void testBuildAndMerge(int tableSize, int numFrames, int frameSize, int minDataSize, int minRecordSize,
+ int maxRecordSize, Map<Integer, String> specialData) throws HyracksDataException {
IHyracksTaskContext ctx = TestUtils.create(frameSize);
initial(ctx, tableSize, numFrames);
ArrayList<IFrame> input = new ArrayList<>();
Map<Integer, String> keyValueMap = new HashMap<>();
- AbstractRunGeneratorTest
- .prepareData(ctx, input, minDataSize, minRecordSize, maxRecordSize, specialData, keyValueMap);
+ AbstractRunGeneratorTest.prepareData(ctx, input, minDataSize, minRecordSize, maxRecordSize, specialData,
+ keyValueMap);
ResultValidateWriter writer = new ResultValidateWriter(keyValueMap);
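The hunks above all apply one rule from the new format template: when an initializer or call would overrun the template's line limit (120 columns), the formatter breaks immediately after the '=' and continues at a double indent, instead of breaking inside the argument list. A minimal sketch of the resulting shape, using invented names rather than anything from this patch:

    public class WrappingStyleExample {
        // The long initializer breaks after '=', keeping the array-creation expression intact.
        static final String[] LONG_INITIALIZER =
                new String[] { "a-sufficiently-long-element", "another-long-element", "a-third-element" };

        public static void main(String[] args) {
            System.out.println(LONG_INITIALIZER.length); // prints 3
        }
    }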
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/IntersectOperatorDescriptorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/IntersectOperatorDescriptorTest.java
index 6729713..bd51619 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/IntersectOperatorDescriptorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/IntersectOperatorDescriptorTest.java
@@ -51,9 +51,9 @@
public class IntersectOperatorDescriptorTest {
- IOperatorDescriptorRegistry mockRegistry = when(
- mock(IOperatorDescriptorRegistry.class).createOperatorDescriptorId(any()))
- .thenReturn(new OperatorDescriptorId(1)).getMock();
+ IOperatorDescriptorRegistry mockRegistry =
+ when(mock(IOperatorDescriptorRegistry.class).createOperatorDescriptorId(any()))
+ .thenReturn(new OperatorDescriptorId(1)).getMock();
MultiThreadTaskEmulator multiThreadTaskEmulator = new MultiThreadTaskEmulator();
InputFrameGenerator frameGenerator = new InputFrameGenerator(256);
IHyracksTaskContext ctx = TestUtils.create(256);
@@ -72,10 +72,9 @@
inputRecordDescriptor = new RecordDescriptor[nInputs];
normalizedKeyFactory = null;
- comparatorFactory = new IBinaryComparatorFactory[] {
- PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY),
- PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY)
- };
+ comparatorFactory =
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
for (int i = 0; i < nInputs; i++) {
compareFields[i] = new int[nProjectFields];
@@ -84,17 +83,13 @@
}
}
for (int i = 0; i < nInputs; i++) {
- inputRecordDescriptor[i] = new RecordDescriptor(new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE
- });
+ inputRecordDescriptor[i] =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
}
outRecordDescriptor = new RecordDescriptor(new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE
- });
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
}
@Before
@@ -208,8 +203,8 @@
generateRecordStream(answer, outRecordDescriptor, 0, 100, 1);
}
- private void generateRecordStream(List<Object[]> inputs, RecordDescriptor recordDesc,
- int start, int end, int step) {
+ private void generateRecordStream(List<Object[]> inputs, RecordDescriptor recordDesc, int start, int end,
+ int step) {
for (int i = start; i < end; i += step) {
Object[] obj = new Object[recordDesc.getFieldCount()];
for (int f = 0; f < recordDesc.getFieldCount(); f++) {
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/TopKRunGeneratorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/TopKRunGeneratorTest.java
index b2a8323..016fe0b 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/TopKRunGeneratorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/TopKRunGeneratorTest.java
@@ -137,8 +137,8 @@
public void testHybridTopKWithTwoNormalizedKeys() throws HyracksDataException {
int topK = SORT_FRAME_LIMIT;
IHyracksTaskContext ctx = AbstractRunGeneratorTest.testUtils.create(PAGE_SIZE);
- AbstractSortRunGenerator sorter = new HybridTopKSortRunGenerator(
- ctx, SORT_FRAME_LIMIT, topK, SortFields, new INormalizedKeyComputerFactory[] {
+ AbstractSortRunGenerator sorter = new HybridTopKSortRunGenerator(ctx,
+ SORT_FRAME_LIMIT, topK, SortFields, new INormalizedKeyComputerFactory[] {
new IntegerNormalizedKeyComputerFactory(), new UTF8StringNormalizedKeyComputerFactory() },
ComparatorFactories, RecordDesc);
testInMemoryOnly(ctx, topK, ORDER.REVERSE, sorter);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/OutputFrameVerifier.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/OutputFrameVerifier.java
index 77b6913..13ed058 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/OutputFrameVerifier.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/OutputFrameVerifier.java
@@ -56,8 +56,7 @@
Object[] objects = new Object[inputRecordDescriptor.getFieldCount()];
for (int fid = 0; fid < inputRecordDescriptor.getFieldCount(); fid++) {
ByteArrayInputStream bais = new ByteArrayInputStream(frameAccessor.getBuffer().array(),
- frameAccessor.getAbsoluteFieldStartOffset(tid, fid),
- frameAccessor.getFieldLength(tid, fid));
+ frameAccessor.getAbsoluteFieldStartOffset(tid, fid), frameAccessor.getFieldLength(tid, fid));
DataInputStream dis = new DataInputStream(bais);
objects[fid] = inputRecordDescriptor.getFields()[fid].deserialize(dis);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java
index 49b2779..7c85d5a 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java
@@ -33,6 +33,7 @@
private static Logger LOGGER = LogManager.getLogger();
@Rule
public ExpectedException closeTwice = ExpectedException.none();
+
@Test
public void runShutdown() throws Exception {
IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Common.java b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Common.java
index 3c0ecfd..2844d02 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Common.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Common.java
@@ -43,22 +43,22 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- static RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
- static RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ static RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
+ static RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
static RecordDescriptor lineitemDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
@@ -70,25 +70,20 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- static IValueParserFactory[] lineitemParserFactories = new IValueParserFactory[] {
- IntegerParserFactory.INSTANCE, IntegerParserFactory.INSTANCE,
- IntegerParserFactory.INSTANCE, IntegerParserFactory.INSTANCE,
- IntegerParserFactory.INSTANCE, FloatParserFactory.INSTANCE,
- FloatParserFactory.INSTANCE, FloatParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, };
+ static IValueParserFactory[] lineitemParserFactories = new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
+ IntegerParserFactory.INSTANCE, IntegerParserFactory.INSTANCE, IntegerParserFactory.INSTANCE,
+ IntegerParserFactory.INSTANCE, FloatParserFactory.INSTANCE, FloatParserFactory.INSTANCE,
+ FloatParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, };
static IValueParserFactory[] custParserFactories = new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE };
static IValueParserFactory[] orderParserFactories = new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE };
static FileSplit[] parseFileSplits(String fileSplits) {
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
index 42fe8c9..80c4f88 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
@@ -134,8 +134,8 @@
createPartitionConstraint(spec, fileScanner, inSplits);
// Output: each unique string with an integer count
- RecordDescriptor outDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ RecordDescriptor outDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
// IntegerSerializerDeserializer.INSTANCE,
IntegerSerializerDeserializer.INSTANCE });
@@ -187,9 +187,9 @@
spec.connect(scanGroupConnDef2, fileScanner, 0, grouper, 0);
IFileSplitProvider outSplitProvider = new ConstantFileSplitProvider(outSplits);
- AbstractSingleActivityOperatorDescriptor writer = outPlain ? new PlainFileWriterOperatorDescriptor(spec,
- outSplitProvider, "|")
- : new FrameFileWriterOperatorDescriptor(spec, outSplitProvider);
+ AbstractSingleActivityOperatorDescriptor writer =
+ outPlain ? new PlainFileWriterOperatorDescriptor(spec, outSplitProvider, "|")
+ : new FrameFileWriterOperatorDescriptor(spec, outSplitProvider);
createPartitionConstraint(spec, writer, outSplits);
IConnectorDescriptor groupOutConn = new OneToOneConnectorDescriptor(spec);
spec.connect(groupOutConn, grouper, 0, writer, 0);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
index 7e56004..5043974 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
@@ -87,13 +87,13 @@
}
static int[] SortFields = new int[] { 1, 0 };
- static IBinaryComparatorFactory[] SortFieldsComparatorFactories = new IBinaryComparatorFactory[] {
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
+ static IBinaryComparatorFactory[] SortFieldsComparatorFactories =
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
- static IBinaryHashFunctionFactory[] orderBinaryHashFunctionFactories = new IBinaryHashFunctionFactory[] {
- PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) };
+ static IBinaryHashFunctionFactory[] orderBinaryHashFunctionFactories =
+ new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) };
public static void main(String[] args) throws Exception {
Options options = new Options();
@@ -107,8 +107,8 @@
IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
JobSpecification job = createJob(parseFileSplits(options.inFileOrderSplits),
- parseFileSplits(options.outFileSplits),
- options.memBufferAlg, options.frameLimit, options.frameSize, options.topK, options.usingHeapSorter);
+ parseFileSplits(options.outFileSplits), options.memBufferAlg, options.frameLimit, options.frameSize,
+ options.topK, options.usingHeapSorter);
long start = System.currentTimeMillis();
JobId jobId = hcc.startJob(job,
@@ -156,8 +156,8 @@
spec.connect(
new MToNPartitioningMergingConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(SortFields, orderBinaryHashFunctionFactories),
- SortFields, SortFieldsComparatorFactories, new UTF8StringNormalizedKeyComputerFactory()),
+ new FieldHashPartitionComputerFactory(SortFields, orderBinaryHashFunctionFactories), SortFields,
+ SortFieldsComparatorFactories, new UTF8StringNormalizedKeyComputerFactory()),
sorter, 0, printer, 0);
spec.addRoot(printer);
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/INcCollectionBuilder.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/INcCollectionBuilder.java
index 9010378..02c5fb3 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/INcCollectionBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/INcCollectionBuilder.java
@@ -30,7 +30,6 @@
*/
public interface INcCollectionBuilder {
- public INcCollection build(Map<String, NodeControllerInfo> ncNameToNcInfos,
- Map<String, List<String>> ipToNcMapping, Map<String, Integer> ncNameToIndex, String[] NCs, int[] workloads,
- int slotLimit);
+ public INcCollection build(Map<String, NodeControllerInfo> ncNameToNcInfos, Map<String, List<String>> ipToNcMapping,
+ Map<String, Integer> ncNameToIndex, String[] NCs, int[] workloads, int slotLimit);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/ITupleWriterFactory.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/ITupleWriterFactory.java
index 6d3a082..57dadb0 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/ITupleWriterFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/ITupleWriterFactory.java
@@ -34,6 +34,7 @@
* the IHyracksTaskContext
* @return a tuple writer instance
*/
- public ITupleWriter getTupleWriter(IHyracksTaskContext ctx, int partition, int nPartition) throws HyracksDataException;
+ public ITupleWriter getTupleWriter(IHyracksTaskContext ctx, int partition, int nPartition)
+ throws HyracksDataException;
}
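The signature change above shows the template's continuation-indent rule for declarations: parameters wrap at a single continuation indent, and a 'throws' clause that no longer fits drops to its own, further indented line. A hedged sketch of the shape, with interface and method names invented for illustration:

    interface ExampleWriterFactory {
        // Parameters stay on the first line while they fit; 'throws' wraps below.
        ExampleWriter createWriter(String destinationPath, int partition, int totalPartitions)
                throws java.io.IOException;
    }

    interface ExampleWriter {
        void write(byte[] record) throws java.io.IOException;
    }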
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java
index 8357ae0..021efca 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java
@@ -90,7 +90,7 @@
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
final InputSplit[] inputSplits = splitsFactory.getSplits();
return new AbstractUnaryOutputSourceOperatorNodePushable() {
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/IPProximityNcCollectionBuilder.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/IPProximityNcCollectionBuilder.java
index 6d7d63b..c53a779 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/IPProximityNcCollectionBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/IPProximityNcCollectionBuilder.java
@@ -46,8 +46,7 @@
byte[] rawip;
try {
rawip = ncNameToNcInfos.get(NCs[i]).getNetworkAddress().lookupIpAddress();
- }
- catch (UnknownHostException e) {
+ } catch (UnknownHostException e) {
// QQQ Should probably have a neater solution than this
throw new RuntimeException(e);
}
@@ -122,8 +121,8 @@
/**
* Update the entry of the selected NC
*/
- List<String> dataLocations = ipToNcMapping.get(InetAddress.getByAddress(
- currentCandidateIp.getBytes()).getHostAddress());
+ List<String> dataLocations = ipToNcMapping
+ .get(InetAddress.getByAddress(currentCandidateIp.getBytes()).getHostAddress());
for (String nc : dataLocations) {
int ncIndex = ncNameToIndex.get(nc);
if (workloads[ncIndex] < slotLimit) {
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
index c9bf547..63be8c5 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
@@ -52,8 +52,8 @@
final Map<List<Integer>, List<String>> pathToNCs = new HashMap<List<Integer>, List<String>>();
for (String NC : NCs) {
List<Integer> path = new ArrayList<>();
- String ipAddress = InetAddress.getByAddress(
- ncNameToNcInfos.get(NC).getNetworkAddress().lookupIpAddress()).getHostAddress();
+ String ipAddress = InetAddress
+ .getByAddress(ncNameToNcInfos.get(NC).getNetworkAddress().lookupIpAddress()).getHostAddress();
topology.lookupNetworkTerminal(ipAddress, path);
if (path.isEmpty()) {
// if the hyracks nc is not in the defined cluster
@@ -64,8 +64,8 @@
ncs.add(NC);
}
- final TreeMap<List<Integer>, IntWritable> availableIpsToSlots = new TreeMap<List<Integer>, IntWritable>(
- (l1, l2) -> {
+ final TreeMap<List<Integer>, IntWritable> availableIpsToSlots =
+ new TreeMap<List<Integer>, IntWritable>((l1, l2) -> {
int commonLength = Math.min(l1.size(), l2.size());
for (int i = 0; i < commonLength; i++) {
int value1 = l1.get(i);
@@ -80,8 +80,9 @@
for (int i = 0; i < workloads.length; i++) {
if (workloads[i] < slotLimit) {
List<Integer> path = new ArrayList<Integer>();
- String ipAddress = InetAddress.getByAddress(
- ncNameToNcInfos.get(NCs[i]).getNetworkAddress().lookupIpAddress()).getHostAddress();
+ String ipAddress =
+ InetAddress.getByAddress(ncNameToNcInfos.get(NCs[i]).getNetworkAddress().lookupIpAddress())
+ .getHostAddress();
topology.lookupNetworkTerminal(ipAddress, path);
if (path.isEmpty()) {
// if the hyracks nc is not in the defined cluster
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
index 615f827..25cc9b3 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
@@ -130,8 +130,8 @@
public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos, ClusterTopology topology)
throws HyracksException {
this(ncNameToNcInfos);
- this.ncCollectionBuilder = topology == null ? new IPProximityNcCollectionBuilder()
- : new RackAwareNcCollectionBuilder(topology);
+ this.ncCollectionBuilder =
+ topology == null ? new IPProximityNcCollectionBuilder() : new RackAwareNcCollectionBuilder(topology);
}
/**
@@ -276,7 +276,7 @@
*/
private void scheduleLocalSlots(InputSplit[] splits, int[] workloads, String[] locations, int slots, Random random,
boolean[] scheduled, final Map<String, IntWritable> locationToNumSplits)
- throws IOException, UnknownHostException {
+ throws IOException, UnknownHostException {
/** scheduling candidates will be ordered inversely according to their popularity */
PriorityQueue<String> scheduleCadndiates = new PriorityQueue<String>(3, new Comparator<String>() {
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
index 15bf260..0c635e0 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
@@ -103,7 +103,7 @@
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
final List<FileSplit> inputSplits = splitsFactory.getSplits();
return new AbstractUnaryOutputSourceOperatorNodePushable() {
@@ -120,8 +120,8 @@
Job job = confFactory.getConf();
job.getConfiguration().setClassLoader(ctx.getJobletContext().getClassLoader());
IKeyValueParser parser = tupleParserFactory.createKeyValueParser(ctx);
- InputFormat inputFormat = ReflectionUtils.newInstance(job.getInputFormatClass(),
- job.getConfiguration());
+ InputFormat inputFormat =
+ ReflectionUtils.newInstance(job.getInputFormatClass(), job.getConfiguration());
int size = inputSplits.size();
for (int i = 0; i < size; i++) {
/**
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/scheduler/Scheduler.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/scheduler/Scheduler.java
index 97960bc..fb46842 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/scheduler/Scheduler.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/scheduler/Scheduler.java
@@ -69,7 +69,8 @@
* the hyracks cluster topology
* @throws HyracksException
*/
- public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos, ClusterTopology topology) throws HyracksException {
+ public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos, ClusterTopology topology)
+ throws HyracksException {
scheduler = new org.apache.hyracks.hdfs.scheduler.Scheduler(ncNameToNcInfos, topology);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java
index 0d0cd3e..b8351f3 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java
@@ -73,7 +73,6 @@
private static final String PATH_TO_HADOOP_CONF = FileUtil.joinPath(TEST_RESOURCES, "hadoop", "conf");
protected static final String BUILD_DIR = FileUtil.joinPath("target", "build");
-
private static final String DATA_PATH = FileUtil.joinPath(TEST_RESOURCES, "data", "customer.tbl");
protected static final String HDFS_INPUT_PATH = "/customer/";
protected static final String HDFS_OUTPUT_PATH = "/customer_result/";
@@ -151,11 +150,11 @@
String[] readSchedule = scheduler.getLocationConstraints(splits);
JobSpecification jobSpec = new JobSpecification();
- RecordDescriptor recordDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor recordDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- String[] locations = new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID,
- HyracksUtils.NC2_ID };
+ String[] locations =
+ new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID, HyracksUtils.NC2_ID };
HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
readSchedule, new TextKeyValueParserFactory());
PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
@@ -164,19 +163,21 @@
new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
- HDFSWriteOperatorDescriptor writeOperator = new HDFSWriteOperatorDescriptor(jobSpec, conf,
- new TextTupleWriterFactory());
+ HDFSWriteOperatorDescriptor writeOperator =
+ new HDFSWriteOperatorDescriptor(jobSpec, conf, new TextTupleWriterFactory());
PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
- jobSpec.connect(new MToNPartitioningMergingConnectorDescriptor(jobSpec, new FieldHashPartitionComputerFactory(
- new int[] { 0 }, new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
- new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
+ jobSpec.connect(
+ new MToNPartitioningMergingConnectorDescriptor(jobSpec,
+ new FieldHashPartitionComputerFactory(new int[] { 0 },
+ new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
+ new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
sortOperator, 0, writeOperator, 0);
jobSpec.addRoot(writeOperator);
- IHyracksClientConnection client = new HyracksConnection(HyracksUtils.CC_HOST,
- HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
+ IHyracksClientConnection client =
+ new HyracksConnection(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
JobId jobId = client.startJob(jobSpec);
client.waitForCompletion(jobId);
@@ -195,8 +196,8 @@
Path actual = new Path(ACTUAL_RESULT_DIR);
dfs.copyToLocalFile(result, actual);
- TestUtils.compareWithResult(new File(FileUtil.joinPath(EXPECTED_RESULT_PATH, "part-0")), new File(
- FileUtil.joinPath(ACTUAL_RESULT_DIR, "customer_result", "part-0")));
+ TestUtils.compareWithResult(new File(FileUtil.joinPath(EXPECTED_RESULT_PATH, "part-0")),
+ new File(FileUtil.joinPath(ACTUAL_RESULT_DIR, "customer_result", "part-0")));
return true;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/scheduler/SchedulerTest.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/scheduler/SchedulerTest.java
index bb28c79..b735833 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/scheduler/SchedulerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/scheduler/SchedulerTest.java
@@ -58,8 +58,8 @@
* @throws Exception
*/
public void testSchedulerSimple() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
InputSplit[] fileSplits = new InputSplit[6];
fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
@@ -94,8 +94,8 @@
int dataPort = 5099;
int resultPort = 5098;
int messagingPort = 5097;
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(4, "nc", "10.0.0.",
- dataPort, resultPort, messagingPort);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(4, "nc", "10.0.0.", dataPort, resultPort, messagingPort);
ncNameToNcInfos.put("nc7",
new NodeControllerInfo("nc7", NodeStatus.ACTIVE, new NetworkAddress("10.0.0.7", dataPort),
new NetworkAddress("10.0.0.5", resultPort), new NetworkAddress("10.0.0.5", messagingPort), 2));
@@ -112,8 +112,8 @@
fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
fileSplits[6] = new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
fileSplits[7] = new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
- fileSplits[8] = new FileSplit(new Path("part-12"), 0, 0,
- new String[] { "10.0.0.14", "10.0.0.11", "10.0.0.13" });
+ fileSplits[8] =
+ new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.14", "10.0.0.11", "10.0.0.13" });
fileSplits[9] = new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" });
fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.7" });
fileSplits[11] = new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" });
@@ -121,14 +121,14 @@
Scheduler scheduler = new Scheduler(ncNameToNcInfos);
String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
- String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc12", "nc7",
- "nc7", "nc12" };
+ String[] expectedResults =
+ new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc12", "nc7", "nc7", "nc12" };
for (int i = 0; i < locationConstraints.length; i++) {
Assert.assertEquals(locationConstraints[i], expectedResults[i]);
}
- expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc7", "nc12", "nc7",
- "nc12" };
+ expectedResults =
+ new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc7", "nc12", "nc7", "nc12" };
ClusterTopology topology = parseTopology();
scheduler = new Scheduler(ncNameToNcInfos, topology);
locationConstraints = scheduler.getLocationConstraints(fileSplits);
@@ -143,8 +143,8 @@
* @throws Exception
*/
public void testSchedulerSmallerHDFS() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
InputSplit[] fileSplits = new InputSplit[12];
fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
@@ -160,8 +160,8 @@
fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
fileSplits[11] = new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
- String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc6",
- "nc5", "nc6" };
+ String[] expectedResults =
+ new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc6", "nc5", "nc6" };
Scheduler scheduler = new Scheduler(ncNameToNcInfos);
String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
@@ -184,8 +184,8 @@
* @throws Exception
*/
public void testSchedulerSmallerHDFSOdd() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
InputSplit[] fileSplits = new InputSplit[13];
fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
@@ -227,8 +227,8 @@
* @throws Exception
*/
public void testSchedulercBoundary() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
/** test empty file splits */
InputSplit[] fileSplits = new InputSplit[0];
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/dataflow/DataflowTest.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/dataflow/DataflowTest.java
index 3c9b1c0..8be6d69 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/dataflow/DataflowTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/dataflow/DataflowTest.java
@@ -98,11 +98,11 @@
String[] readSchedule = scheduler.getLocationConstraints(splits);
JobSpecification jobSpec = new JobSpecification();
- RecordDescriptor recordDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor recordDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- String[] locations = new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID,
- HyracksUtils.NC2_ID };
+ String[] locations =
+ new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID, HyracksUtils.NC2_ID };
HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
readSchedule, new TextKeyValueParserFactory());
PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
@@ -111,19 +111,21 @@
new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
- HDFSWriteOperatorDescriptor writeOperator = new HDFSWriteOperatorDescriptor(jobSpec, conf,
- new TextTupleWriterFactory());
+ HDFSWriteOperatorDescriptor writeOperator =
+ new HDFSWriteOperatorDescriptor(jobSpec, conf, new TextTupleWriterFactory());
PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
- jobSpec.connect(new MToNPartitioningMergingConnectorDescriptor(jobSpec, new FieldHashPartitionComputerFactory(
- new int[] { 0 }, new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
- new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
+ jobSpec.connect(
+ new MToNPartitioningMergingConnectorDescriptor(jobSpec,
+ new FieldHashPartitionComputerFactory(new int[] { 0 },
+ new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
+ new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
sortOperator, 0, writeOperator, 0);
jobSpec.addRoot(writeOperator);
- IHyracksClientConnection client = new HyracksConnection(HyracksUtils.CC_HOST,
- HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
+ IHyracksClientConnection client =
+ new HyracksConnection(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
JobId jobId = client.startJob(jobSpec);
client.waitForCompletion(jobId);
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/scheduler/SchedulerTest.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/scheduler/SchedulerTest.java
index 4d970ba..82230718 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/scheduler/SchedulerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/scheduler/SchedulerTest.java
@@ -43,8 +43,8 @@
* @throws Exception
*/
public void testSchedulerSimple() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
List<InputSplit> fileSplits = new ArrayList<>();
fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
@@ -70,8 +70,8 @@
* @throws Exception
*/
public void testSchedulerLargerHDFS() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
List<InputSplit> fileSplits = new ArrayList<>();
fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
@@ -90,8 +90,8 @@
Scheduler scheduler = new Scheduler(ncNameToNcInfos);
String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
- String[] expectedResults = new String[] { "nc1", "nc4", "nc6", "nc1", "nc4", "nc2", "nc2", "nc3", "nc6", "nc5",
- "nc3", "nc5" };
+ String[] expectedResults =
+ new String[] { "nc1", "nc4", "nc6", "nc1", "nc4", "nc2", "nc2", "nc3", "nc6", "nc5", "nc3", "nc5" };
for (int i = 0; i < locationConstraints.length; i++) {
Assert.assertEquals(locationConstraints[i], expectedResults[i]);
@@ -104,8 +104,8 @@
* @throws Exception
*/
public void testSchedulerSmallerHDFS() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
List<InputSplit> fileSplits = new ArrayList<>();
fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
@@ -124,8 +124,8 @@
Scheduler scheduler = new Scheduler(ncNameToNcInfos);
String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
- String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc6",
- "nc5", "nc6" };
+ String[] expectedResults =
+ new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc6", "nc5", "nc6" };
for (int i = 0; i < locationConstraints.length; i++) {
Assert.assertEquals(locationConstraints[i], expectedResults[i]);
@@ -138,8 +138,8 @@
* @throws Exception
*/
public void testSchedulerSmallerHDFSOdd() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
List<InputSplit> fileSplits = new ArrayList<>();
fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java
index cb6ad0d..3ab2ab9 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java
@@ -68,8 +68,8 @@
HttpResponseEncoder encoder = new HttpResponseEncoder();
ChannelPromise promise = ctx.newPromise();
promise.addListener(ChannelFutureListener.CLOSE);
- DefaultFullHttpResponse response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1,
- HttpResponseStatus.SERVICE_UNAVAILABLE);
+ DefaultFullHttpResponse response =
+ new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.SERVICE_UNAVAILABLE);
try {
encoder.write(ctx, response, ctx.voidPromise());
ctx.writeAndFlush(ctx.alloc().buffer(0), promise);
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
index 7688974..86c8c75 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
@@ -338,9 +338,8 @@
// reset failingLoops on a good loop
failingLoops = 0;
} catch (Exception e) {
- int sleepSecs = (int)Math.pow(2, Math.min(11, failingLoops++));
- LOGGER.log(Level.ERROR, "Exception processing message; sleeping " + sleepSecs
- + " seconds", e);
+ int sleepSecs = (int) Math.pow(2, Math.min(11, failingLoops++));
+ LOGGER.log(Level.ERROR, "Exception processing message; sleeping " + sleepSecs + " seconds", e);
try {
Thread.sleep(TimeUnit.SECONDS.toMillis(sleepSecs));
} catch (InterruptedException e1) {
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/JavaSerializationBasedPayloadSerializerDeserializer.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/JavaSerializationBasedPayloadSerializerDeserializer.java
index 2c1f0dc..c4263d2 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/JavaSerializationBasedPayloadSerializerDeserializer.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/JavaSerializationBasedPayloadSerializerDeserializer.java
@@ -55,8 +55,8 @@
}
private Object deserialize(ByteBuffer buffer, int length) throws Exception {
- ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(buffer.array(), buffer.position(),
- length));
+ ObjectInputStream ois =
+ new ObjectInputStream(new ByteArrayInputStream(buffer.array(), buffer.position(), length));
Object object = ois.readObject();
ois.close();
return object;
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/Message.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/Message.java
index 1f3f0c3..550ce45 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/Message.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/Message.java
@@ -103,8 +103,8 @@
int length = msgSize - HEADER_SIZE;
try {
IPayloadSerializerDeserializer serde = ipcHandle.getIPCSystem().getSerializerDeserializer();
- payload = flag == ERROR ? serde.deserializeException(buffer, length) : serde.deserializeObject(buffer,
- length);
+ payload = flag == ERROR ? serde.deserializeException(buffer, length)
+ : serde.deserializeObject(buffer, length);
} finally {
buffer.position(finalPosition);
}
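The Message and Scheduler hunks above rewrap long conditional expressions the same way: either the whole ternary moves to a fresh line after a break at '=', or the ':' branch drops to a continuation line so both branches stay within the limit. An illustrative sketch with invented names:

    class TernaryWrappingExample {
        static String describe(boolean compact, String longFirstAlternative, String longSecondAlternative) {
            // The ternary follows a break after '='; a longer one would wrap before ':'.
            String result =
                    compact ? longFirstAlternative : longSecondAlternative;
            return result;
        }
    }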
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/test/java/org/apache/hyracks/ipc/tests/IPCTest.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/test/java/org/apache/hyracks/ipc/tests/IPCTest.java
index b454520..1a075d5 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/test/java/org/apache/hyracks/ipc/tests/IPCTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/test/java/org/apache/hyracks/ipc/tests/IPCTest.java
@@ -63,8 +63,8 @@
final Executor executor = Executors.newCachedThreadPool();
IIPCI ipci = new IIPCI() {
@Override
- public void deliverIncomingMessage(final IIPCHandle handle, final long mid, long rmid,
- final Object payload, Exception exception) {
+ public void deliverIncomingMessage(final IIPCHandle handle, final long mid, long rmid, final Object payload,
+ Exception exception) {
executor.execute(new Runnable() {
@Override
public void run() {
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DependencySet.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DependencySet.java
index 8e91be4..3aef194 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DependencySet.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DependencySet.java
@@ -27,7 +27,7 @@
@SuppressWarnings("unused") // set by Maven configuration
private String location;
- @SuppressWarnings({"unused", "MismatchedQueryAndUpdateOfCollection"}) // set by Maven configuration
+ @SuppressWarnings({ "unused", "MismatchedQueryAndUpdateOfCollection" }) // set by Maven configuration
private List<String> includes;
private List<Pattern> patterns;
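The annotation change above reflects the template's brace-padding rule: array initializers always carry a space inside the braces, '{ "unused" }' rather than '{"unused"}'. A small illustrative sketch; the class below is invented:

    @SuppressWarnings({ "unchecked", "rawtypes" })
    class BracePaddingExample {
        // The same padding applies to ordinary array expressions.
        int[] counts = new int[] { 1, 2, 3 };
    }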
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DownloadLicensesMojo.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DownloadLicensesMojo.java
index 7d0e77d..1b2961f 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DownloadLicensesMojo.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DownloadLicensesMojo.java
@@ -39,10 +39,7 @@
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.ProjectBuildingException;
-@Mojo(name = "licensedownload",
- requiresProject = true,
- requiresDependencyResolution = ResolutionScope.TEST,
- defaultPhase = LifecyclePhase.GENERATE_RESOURCES)
+@Mojo(name = "licensedownload", requiresProject = true, requiresDependencyResolution = ResolutionScope.TEST, defaultPhase = LifecyclePhase.GENERATE_RESOURCES)
public class DownloadLicensesMojo extends LicenseMojo {
@Parameter(required = true)
@@ -73,7 +70,7 @@
private void doDownload(int timeoutMillis, int id, String url, String fileName) {
try {
- HttpURLConnection conn = (HttpURLConnection)new URL(url).openConnection();
+ HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
conn.setConnectTimeout(timeoutMillis);
conn.setReadTimeout(timeoutMillis);
conn.setRequestMethod("GET");
@@ -90,4 +87,3 @@
}
}
}
-
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/GenerateFileMojo.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/GenerateFileMojo.java
index 387d18e..0245eb3 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/GenerateFileMojo.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/GenerateFileMojo.java
@@ -45,12 +45,6 @@
import java.util.jar.JarFile;
import java.util.regex.Pattern;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.SequenceWriter;
-import freemarker.cache.FileTemplateLoader;
-import freemarker.template.Configuration;
-import freemarker.template.Template;
-import freemarker.template.TemplateException;
import org.apache.commons.io.IOUtils;
import org.apache.hyracks.maven.license.freemarker.IndentDirective;
import org.apache.hyracks.maven.license.freemarker.LoadFileDirective;
@@ -63,13 +57,19 @@
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.ProjectBuildingException;
-@Mojo(name = "generate",
- requiresProject = true,
- requiresDependencyResolution = ResolutionScope.TEST)
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SequenceWriter;
+import freemarker.cache.FileTemplateLoader;
+import freemarker.template.Configuration;
+import freemarker.template.Template;
+import freemarker.template.TemplateException;
+
+@Mojo(name = "generate", requiresProject = true, requiresDependencyResolution = ResolutionScope.TEST)
public class GenerateFileMojo extends LicenseMojo {
- public static final Pattern FOUNDATION_PATTERN = Pattern.compile("^\\s*This product includes software developed " +
- "(at|by) The Apache Software Foundation \\(http://www.apache.org/\\).\\s*$".replace(" ", "\\s+"),
+ public static final Pattern FOUNDATION_PATTERN = Pattern.compile(
+ "^\\s*This product includes software developed "
+ + "(at|by) The Apache Software Foundation \\(http://www.apache.org/\\).\\s*$".replace(" ", "\\s+"),
Pattern.DOTALL | Pattern.MULTILINE);
public static final Comparator<String> WHITESPACE_NORMALIZED_COMPARATOR =
@@ -121,7 +121,6 @@
}
}
-
private void resolveLicenseContent() throws IOException {
Set<LicenseSpec> licenseSpecs = new HashSet<>();
for (LicensedProjects licensedProjects : licenseMap.values()) {
@@ -158,7 +157,7 @@
private void combineCommonGavs() {
for (LicensedProjects licensedProjects : licenseMap.values()) {
Map<String, Project> projectMap = new HashMap<>();
- for (Iterator<Project> iter = licensedProjects.getProjects().iterator(); iter.hasNext(); ) {
+ for (Iterator<Project> iter = licensedProjects.getProjects().iterator(); iter.hasNext();) {
Project project = iter.next();
if (projectMap.containsKey(project.gav())) {
Project first = projectMap.get(project.gav());
@@ -208,19 +207,17 @@
private void readExtraMaps() throws IOException {
final ObjectMapper objectMapper = new ObjectMapper();
for (ExtraLicenseFile extraLicenseFile : extraLicenseMaps) {
- for (LicensedProjects projects :
- objectMapper.readValue(extraLicenseFile.getFile(), LicensedProjects[].class)) {
+ for (LicensedProjects projects : objectMapper.readValue(extraLicenseFile.getFile(),
+ LicensedProjects[].class)) {
LicenseSpec spec = urlToLicenseMap.get(projects.getLicense().getUrl());
if (spec != null) {
// TODO(mblow): probably we should always favor the extra map...
// propagate any license content we may have with what already has been loaded
- if (projects.getLicense().getContent() != null &&
- spec.getContent() == null) {
+ if (projects.getLicense().getContent() != null && spec.getContent() == null) {
spec.setContent(projects.getLicense().getContent());
}
// propagate any license displayName we may have with what already has been loaded
- if (projects.getLicense().getDisplayName() != null &&
- spec.getDisplayName() == null) {
+ if (projects.getLicense().getDisplayName() != null && spec.getDisplayName() == null) {
spec.setDisplayName(projects.getLicense().getDisplayName());
}
}
@@ -235,8 +232,8 @@
private void persistLicenseMap() throws IOException {
if (licenseMapOutputFile != null) {
licenseMapOutputFile.getParentFile().mkdirs();
- SequenceWriter sw = new ObjectMapper().writerWithDefaultPrettyPrinter()
- .writeValues(licenseMapOutputFile).init(true);
+ SequenceWriter sw =
+ new ObjectMapper().writerWithDefaultPrettyPrinter().writeValues(licenseMapOutputFile).init(true);
for (LicensedProjects entry : licenseMap.values()) {
sw.write(entry);
}
@@ -321,7 +318,7 @@
}
private void resolveArtifactFiles(final String name, Predicate<JarEntry> filter,
- BiConsumer<Project, String> consumer, UnaryOperator<String> contentTransformer)
+ BiConsumer<Project, String> consumer, UnaryOperator<String> contentTransformer)
throws MojoExecutionException, IOException {
for (Project p : getProjects()) {
File artifactFile = new File(p.getArtifactPath());
@@ -332,8 +329,7 @@
continue;
}
try (JarFile jarFile = new JarFile(artifactFile)) {
- SortedMap<String, JarEntry> matches = gatherMatchingEntries(jarFile,
- filter);
+ SortedMap<String, JarEntry> matches = gatherMatchingEntries(jarFile, filter);
if (matches.isEmpty()) {
getLog().warn("No " + name + " file found for " + p.gav());
} else {
@@ -343,15 +339,14 @@
} else {
getLog().info(p.gav() + " has " + name + " file: " + matches.keySet());
}
- resolveContent(p, jarFile, matches.values().iterator().next(),
- contentTransformer, consumer, name);
+ resolveContent(p, jarFile, matches.values().iterator().next(), contentTransformer, consumer, name);
}
}
}
}
private void resolveContent(Project project, JarFile jarFile, JarEntry entry, UnaryOperator<String> transformer,
- BiConsumer<Project, String> contentConsumer, final String name) throws IOException {
+ BiConsumer<Project, String> contentConsumer, final String name) throws IOException {
String text = IOUtils.toString(jarFile.getInputStream(entry), StandardCharsets.UTF_8);
text = transformer.apply(text);
text = LicenseUtil.trim(text);
@@ -375,4 +370,3 @@
return matches;
}
}
-
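The FOUNDATION_PATTERN rewrap above preserves a handy trick: the notice text is written as a plain spaced string and each literal space is loosened into \s+ so the pattern matches NOTICE files however they were rewrapped. A sketch of the same trick, with an illustrative notice string:

import java.util.regex.Pattern;

public class WhitespaceTolerantMatch {
    public static void main(String[] args) {
        // Illustrative notice text; the real pattern targets the ASF attribution line.
        String notice = "This product includes software developed (at|by) Example \\(http://example.org/\\)\\.";
        // Loosening every literal space into \s+ lets the pattern survive
        // rewrapped and re-indented NOTICE files.
        Pattern p = Pattern.compile("^\\s*" + notice.replace(" ", "\\s+") + "\\s*$",
                Pattern.DOTALL | Pattern.MULTILINE);
        String wrapped = "This  product includes\n  software developed by\nExample (http://example.org/).";
        System.out.println(p.matcher(wrapped).find()); // true
    }
}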
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java
index 6d8f9cf..97afffb 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java
@@ -59,7 +59,7 @@
protected List<Override> overrides = new ArrayList<>();
@Parameter
- protected String [] models = new String [0];
+ protected String[] models = new String[0];
@Parameter
protected List<LicenseSpec> licenses = new ArrayList<>();
@@ -73,31 +73,31 @@
@Parameter
protected List<DependencySet> dependencySets = new ArrayList<>();
- @Parameter( defaultValue = "${project}", readonly = true )
+ @Parameter(defaultValue = "${project}", readonly = true)
protected MavenProject project;
- @Parameter( property = "localRepository", required = true, readonly = true )
+ @Parameter(property = "localRepository", required = true, readonly = true)
private ArtifactRepository localRepository;
- @Parameter( property = "project.remoteArtifactRepositories", required = true, readonly = true )
+ @Parameter(property = "project.remoteArtifactRepositories", required = true, readonly = true)
private List<ArtifactRepository> remoteRepositories;
- @Component( role = MavenProjectBuilder.class )
+ @Component(role = MavenProjectBuilder.class)
protected MavenProjectBuilder projectBuilder;
@Component
private ModelInheritanceAssembler assembler;
- @Parameter( defaultValue = "${session}", required = true, readonly = true )
+ @Parameter(defaultValue = "${session}", required = true, readonly = true)
protected MavenSession session;
@Component
protected ArtifactResolver artifactResolver;
- @Parameter ( required = true )
+ @Parameter(required = true)
private String location;
- @Parameter ( required = true )
+ @Parameter(required = true)
protected File licenseDirectory;
private Map<String, MavenProject> projectCache = new HashMap<>();
@@ -113,8 +113,7 @@
return licenseMap;
}
- protected void init() throws MojoExecutionException, MalformedURLException,
- ProjectBuildingException {
+ protected void init() throws MojoExecutionException, MalformedURLException, ProjectBuildingException {
excludedScopes.add("system");
excludePatterns = compileExcludePatterns();
supplementModels = SupplementalModelHelper.loadSupplements(getLog(), models);
@@ -144,7 +143,7 @@
}
private void addDependencyToLicenseMap(MavenProject depProject, List<Pair<String, String>> depLicenses,
- String depLocation) {
+ String depLocation) {
final String depGav = toGav(depProject);
getLog().debug("adding " + depGav + ", location: " + depLocation);
final MutableBoolean usedMetric = new MutableBoolean(false);
@@ -152,17 +151,16 @@
Collections.sort(depLicenses, (o1, o2) -> {
final int metric1 = getLicenseMetric(o1.getLeft());
final int metric2 = getLicenseMetric(o2.getLeft());
- usedMetric.setValue(usedMetric.booleanValue()
- || metric1 != LicenseSpec.UNDEFINED_LICENSE_METRIC
+ usedMetric.setValue(usedMetric.booleanValue() || metric1 != LicenseSpec.UNDEFINED_LICENSE_METRIC
|| metric2 != LicenseSpec.UNDEFINED_LICENSE_METRIC);
return Integer.compare(metric1, metric2);
});
if (usedMetric.booleanValue()) {
- getLog().info("Multiple licenses for " + depGav + ": " + depLicenses
- + "; taking lowest metric: " + depLicenses.get(0));
+ getLog().info("Multiple licenses for " + depGav + ": " + depLicenses + "; taking lowest metric: "
+ + depLicenses.get(0));
} else {
- getLog().warn("Multiple licenses for " + depGav + ": " + depLicenses
- + "; taking first listed: " + depLicenses.get(0));
+ getLog().warn("Multiple licenses for " + depGav + ": " + depLicenses + "; taking first listed: "
+ + depLicenses.get(0));
}
} else if (depLicenses.isEmpty()) {
getLog().info("no license defined in model for " + depGav);
@@ -179,8 +177,7 @@
} catch (MalformedURLException e) {
// we encounter this a lot. Log a warning, and use an annotated key
final String fakeLicenseUrl = depGav.replaceAll(":", "--") + "_" + licenseUrl;
- getLog().info("- URL for " + depGav + " is malformed: " + licenseUrl + "; using: "
- + fakeLicenseUrl);
+ getLog().info("- URL for " + depGav + " is malformed: " + licenseUrl + "; using: " + fakeLicenseUrl);
licenseUrl = fakeLicenseUrl;
}
}
@@ -196,7 +193,7 @@
urlToLicenseMap.put(licenseUrl, license);
for (String alias : license.getAliasUrls()) {
if (!urlToLicenseMap.containsKey(alias)) {
- urlToLicenseMap.put(alias ,license);
+ urlToLicenseMap.put(alias, license);
}
}
} else if (license.getDisplayName() == null && spec.getDisplayName() != null) {
@@ -216,11 +213,11 @@
private void buildUrlLicenseMap() throws MojoExecutionException {
for (LicenseSpec license : licenses) {
- if (urlToLicenseMap.put(license.getUrl() ,license) != null) {
+ if (urlToLicenseMap.put(license.getUrl(), license) != null) {
throw new MojoExecutionException("Duplicate URL mapping: " + license.getUrl());
}
for (String alias : license.getAliasUrls()) {
- if (urlToLicenseMap.put(alias ,license) != null) {
+ if (urlToLicenseMap.put(alias, license) != null) {
throw new MojoExecutionException("Duplicate URL mapping: " + alias);
}
}
@@ -238,20 +235,19 @@
if (dep == null) {
getLog().warn("Unused override dependency " + gav + "; ignoring...");
} else {
- final List<Pair<String, String>> newLicense = Collections.singletonList(
- new ImmutablePair<>(override.getUrl(), override.getName()));
+ final List<Pair<String, String>> newLicense =
+ Collections.singletonList(new ImmutablePair<>(override.getUrl(), override.getName()));
List<Pair<String, String>> prevLicense = dependencyLicenseMap.put(dep, newLicense);
- getLog().warn("license list for " + toGav(dep)
- + " changed with <override>; was: " + prevLicense
+ getLog().warn("license list for " + toGav(dep) + " changed with <override>; was: " + prevLicense
+ ", now: " + newLicense);
}
}
return dependencyLicenseMap;
}
- private void gatherProjectDependencies(MavenProject project, Map<MavenProject,
- List<Pair<String, String>>> dependencyLicenseMap, Map<String, MavenProject> dependencyGavMap)
- throws ProjectBuildingException {
+ private void gatherProjectDependencies(MavenProject project,
+ Map<MavenProject, List<Pair<String, String>>> dependencyLicenseMap,
+ Map<String, MavenProject> dependencyGavMap) throws ProjectBuildingException {
final Set dependencyArtifacts = project.getArtifacts();
if (dependencyArtifacts != null) {
for (Object depArtifactObj : dependencyArtifacts) {
@@ -264,8 +260,7 @@
for (Object license : dep.getLicenses()) {
final License license1 = (License) license;
String url = license1.getUrl() != null ? license1.getUrl()
- : (license1.getName() != null ? license1.getName()
- : "LICENSE_EMPTY_NAME_URL");
+ : (license1.getName() != null ? license1.getName() : "LICENSE_EMPTY_NAME_URL");
licenseUrls.add(new ImmutablePair<>(url, license1.getName()));
}
dependencyLicenseMap.put(dep, licenseUrls);
@@ -286,22 +281,21 @@
throw new ProjectBuildingException(key, "Error creating dependent artifacts", e);
}
- Model supplement = supplementModels.get(
- SupplementalModelHelper.generateSupplementMapKey(depObj.getGroupId(), depObj.getArtifactId()));
+ Model supplement = supplementModels
+ .get(SupplementalModelHelper.generateSupplementMapKey(depObj.getGroupId(), depObj.getArtifactId()));
if (supplement != null) {
Model merged = SupplementalModelHelper.mergeModels(assembler, depProj.getModel(), supplement);
- Set<String> origLicenses = depProj.getModel().getLicenses().stream().map(License::getUrl)
- .collect(Collectors.toSet());
- Set<String> newLicenses = merged.getLicenses().stream().map(License::getUrl)
- .collect(Collectors.toSet());
+ Set<String> origLicenses =
+ depProj.getModel().getLicenses().stream().map(License::getUrl).collect(Collectors.toSet());
+ Set<String> newLicenses =
+ merged.getLicenses().stream().map(License::getUrl).collect(Collectors.toSet());
if (!origLicenses.equals(newLicenses)) {
- getLog().warn("license list for " + toGav(depProj)
- + " changed with supplemental model; was: " + origLicenses
- + ", now: " + newLicenses);
+ getLog().warn("license list for " + toGav(depProj) + " changed with supplemental model; was: "
+ + origLicenses + ", now: " + newLicenses);
}
depProj = new MavenProject(merged);
- depProj.setArtifact( depObj );
- depProj.setVersion( depObj.getVersion() );
+ depProj.setArtifact(depObj);
+ depProj.setVersion(depObj.getVersion());
}
depProj.getArtifact().setScope(depObj.getScope());
projectCache.put(key, depProj);
@@ -354,4 +348,3 @@
return artifactResolver;
}
}
-
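The license-selection hunks above sort a dependency's candidate licenses by a configured numeric metric and take the lowest. A sketch of that selection, with hypothetical URLs and metric values (UNDEFINED_LICENSE_METRIC here is just a sentinel, not the real constant's value):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;

public class LicenseMetricSort {
    static final int UNDEFINED_LICENSE_METRIC = Integer.MAX_VALUE; // sentinel for unranked licenses

    public static void main(String[] args) {
        Map<String, Integer> metrics = Map.of("http://example.org/alv2", 1,
                "http://example.org/gpl", 9);
        List<String> depLicenses = new ArrayList<>(List.of("http://example.org/gpl",
                "http://example.org/alv2", "http://example.org/unknown"));
        // lower metric = preferred; the first entry after sorting wins
        depLicenses.sort(Comparator.comparingInt(
                url -> metrics.getOrDefault(url, UNDEFINED_LICENSE_METRIC)));
        System.out.println(depLicenses.get(0)); // http://example.org/alv2
    }
}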
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseSpec.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseSpec.java
index f2ff5dd..cd955d9 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseSpec.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseSpec.java
@@ -46,8 +46,8 @@
@JsonCreator
public LicenseSpec(@JsonProperty("aliasUrls") List<String> aliasUrls, @JsonProperty("content") String content,
- @JsonProperty("contentFile") String contentFile, @JsonProperty("displayName") String displayName,
- @JsonProperty("metric") int metric, @JsonProperty("url") String url) {
+ @JsonProperty("contentFile") String contentFile, @JsonProperty("displayName") String displayName,
+ @JsonProperty("metric") int metric, @JsonProperty("url") String url) {
this.aliasUrls = aliasUrls;
this.content = content;
this.contentFile = contentFile;
@@ -77,8 +77,7 @@
String file;
try {
URI uri = new URI(url);
- file = ((uri.getHost() != null ? uri.getHost() : "")
- + uri.getPath()).replaceAll(BAD_CHARS, "_");
+ file = ((uri.getHost() != null ? uri.getHost() : "") + uri.getPath()).replaceAll(BAD_CHARS, "_");
} catch (URISyntaxException e) {
file = url.replaceAll(BAD_CHARS, "_");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseUtil.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseUtil.java
index 30588d4..a80dc1d 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseUtil.java
@@ -91,7 +91,7 @@
private static void doTrim(Writer out, BufferedReader reader, int extraPadding, int wrapLength) throws IOException {
boolean head = true;
int empty = 0;
- for (String line = reader.readLine(); line != null; line = reader.readLine() ) {
+ for (String line = reader.readLine(); line != null; line = reader.readLine()) {
if ("".equals(line.trim())) {
if (!head) {
empty++;
@@ -136,7 +136,7 @@
continue;
}
String fullyTrimmed = line.trim();
- freeSpaces = Math.min(freeSpaces, rightTrimmed.length() - fullyTrimmed.length());
+ freeSpaces = Math.min(freeSpaces, rightTrimmed.length() - fullyTrimmed.length());
maxLineLength = Math.max(maxLineLength, fullyTrimmed.length());
}
return new ImmutablePair<>(freeSpaces, maxLineLength);
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/Override.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/Override.java
index c99a047..aa532e6 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/Override.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/Override.java
@@ -18,7 +18,7 @@
*/
package org.apache.hyracks.maven.license;
-public class Override {
+public class Override {
@SuppressWarnings("unused") // set by Maven plugin configuration
private String url;
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/SourcePointerResolver.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/SourcePointerResolver.java
index c713b08..0a24a76 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/SourcePointerResolver.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/SourcePointerResolver.java
@@ -82,15 +82,14 @@
}
private void ensureCDDLSourcesPointer(Collection<Project> projects, ArtifactRepository central,
- ArtifactResolutionRequest request)
- throws ProjectBuildingException, IOException {
+ ArtifactResolutionRequest request) throws ProjectBuildingException, IOException {
for (Project p : projects) {
if (p.getSourcePointer() != null) {
continue;
}
mojo.getLog().debug("finding sources for artifact: " + p);
- Artifact sourcesArtifact = new DefaultArtifact(p.getGroupId(), p.getArtifactId(),
- p.getVersion(), Artifact.SCOPE_COMPILE, "jar", "sources", null);
+ Artifact sourcesArtifact = new DefaultArtifact(p.getGroupId(), p.getArtifactId(), p.getVersion(),
+ Artifact.SCOPE_COMPILE, "jar", "sources", null);
MavenProject mavenProject = mojo.resolveDependency(sourcesArtifact);
sourcesArtifact.setArtifactHandler(mavenProject.getArtifact().getArtifactHandler());
final ArtifactRepository localRepo = mojo.getSession().getLocalRepository();
@@ -162,7 +161,7 @@
@java.lang.Override
public String pathOfLocalRepositoryMetadata(ArtifactMetadata artifactMetadata,
- ArtifactRepository artifactRepository) {
+ ArtifactRepository artifactRepository) {
return null;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/IndentDirective.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/IndentDirective.java
index 77b8afd..f58b419 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/IndentDirective.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/IndentDirective.java
@@ -43,7 +43,7 @@
private static final String PARAM_NAME_WRAP = "wrap";
@Override
- public void execute(Environment env, Map params, TemplateModel [] loopVars, TemplateDirectiveBody body)
+ public void execute(Environment env, Map params, TemplateModel[] loopVars, TemplateDirectiveBody body)
throws TemplateException, IOException {
int numSpaces = -1;
@@ -106,8 +106,7 @@
}
private TemplateModelException paramException(String paramName, String message) throws TemplateModelException {
- return new TemplateModelException(
- "The '" + paramName + "' parameter " + message);
+ return new TemplateModelException("The '" + paramName + "' parameter " + message);
}
private static class IndentingWriter extends Writer {
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/LoadFileDirective.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/LoadFileDirective.java
index 67da23f..2b03fe2 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/LoadFileDirective.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/LoadFileDirective.java
@@ -46,9 +46,7 @@
private static final String PARAM_DEFAULT_TEXT = "defaultOnMissing";
@Override
- public void execute(Environment env,
- Map params, TemplateModel[] loopVars,
- TemplateDirectiveBody body)
+ public void execute(Environment env, Map params, TemplateModel[] loopVars, TemplateDirectiveBody body)
throws TemplateException, IOException {
String fileParam = null;
@@ -91,15 +89,14 @@
}
}
if (fileParam == null) {
- throw new TemplateModelException(
- "The required \"" + PARAM_FILE + "\" parameter"
- + "is missing.");
+ throw new TemplateModelException("The required \"" + PARAM_FILE + "\" parameter" + "is missing.");
}
if (body != null) {
throw new TemplateModelException("Body is not supported by this directive");
}
Writer out = env.getOut();
- File baseDir = ((FileTemplateLoader)((Configuration)env.getTemplate().getParent()).getTemplateLoader()).baseDir;
+ File baseDir =
+ ((FileTemplateLoader) ((Configuration) env.getTemplate().getParent()).getTemplateLoader()).baseDir;
File file = new File(baseDir, fileParam);
if (file.exists()) {
if (trimParam) {
@@ -108,7 +105,7 @@
} else {
IOUtils.copy(new FileInputStream(file), out, StandardCharsets.UTF_8);
}
- } else if (defaultParam != null ) {
+ } else if (defaultParam != null) {
out.append(defaultParam).append("\n");
} else {
throw new IOException("File not found: " + file);
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/LicensedProjects.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/LicensedProjects.java
index 952b91a..f6f9f32 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/LicensedProjects.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/LicensedProjects.java
@@ -38,7 +38,7 @@
@JsonCreator
public LicensedProjects(@JsonProperty("license") LicenseSpec license,
- @JsonProperty("projects") Set<Project> projects) {
+ @JsonProperty("projects") Set<Project> projects) {
this.license = license;
this.projects.addAll(projects);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/Project.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/Project.java
index 80d4548..ff35162 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/Project.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/Project.java
@@ -44,8 +44,8 @@
@JsonIgnore
private MavenProject mavenProject;
- public static final Comparator<Project> PROJECT_COMPARATOR = (o1, o2) ->
- o1.compareToken().compareTo(o2.compareToken());
+ public static final Comparator<Project> PROJECT_COMPARATOR =
+ (o1, o2) -> o1.compareToken().compareTo(o2.compareToken());
public Project(MavenProject project, String location, File artifactPath) {
mavenProject = project;
@@ -60,10 +60,10 @@
@JsonCreator
public Project(@JsonProperty("name") String name, @JsonProperty("groupId") String groupId,
- @JsonProperty("artifactId") String artifactId, @JsonProperty("url") String url,
- @JsonProperty("version") String version, @JsonProperty("location") String location,
- @JsonProperty("artifactPath") String artifactPath, @JsonProperty("noticeText") String noticeText,
- @JsonProperty("licenseText") String licenseText) {
+ @JsonProperty("artifactId") String artifactId, @JsonProperty("url") String url,
+ @JsonProperty("version") String version, @JsonProperty("location") String location,
+ @JsonProperty("artifactPath") String artifactPath, @JsonProperty("noticeText") String noticeText,
+ @JsonProperty("licenseText") String licenseText) {
this.name = name;
this.groupId = groupId;
this.artifactId = artifactId;
diff --git a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
index 81636de..286320b 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
+++ b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
@@ -265,8 +265,8 @@
pendingWriteEventsCounter.decrement();
}
BitSet pendingChannelCreditsBitmap = cSet.getPendingChannelCreditsBitmap();
- for (int j = pendingChannelCreditsBitmap.nextSetBit(0); j >= 0; j = pendingChannelCreditsBitmap
- .nextSetBit(j)) {
+ for (int j = pendingChannelCreditsBitmap.nextSetBit(0); j >= 0; j =
+ pendingChannelCreditsBitmap.nextSetBit(j)) {
writerState.command.setChannelId(j);
writerState.command.setCommandType(MuxDemuxCommand.CommandType.ADD_CREDITS);
ChannelControlBlock ccb = cSet.getCCB(j);
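The reformatted credits loop above walks the set bits of a BitSet; note it advances with nextSetBit(j) rather than the textbook nextSetBit(j + 1), which terminates only because the loop body presumably clears bit j after queuing the ADD_CREDITS command. The standard idiom, for comparison:

import java.util.BitSet;

public class BitSetIteration {
    public static void main(String[] args) {
        BitSet pending = new BitSet();
        pending.set(3);
        pending.set(17);
        // Standard idiom for visiting every set bit exactly once,
        // without relying on the body to clear bits as it goes.
        for (int j = pending.nextSetBit(0); j >= 0; j = pending.nextSetBit(j + 1)) {
            System.out.println("channel " + j);
        }
    }
}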
diff --git a/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksVirtualCluster.java b/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksVirtualCluster.java
index e1590e7..8aa5eab 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksVirtualCluster.java
+++ b/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksVirtualCluster.java
@@ -83,4 +83,3 @@
}
}
}
-
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomCalculations.java b/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomCalculations.java
index e0cf1a80..9525192 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomCalculations.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomCalculations.java
@@ -64,8 +64,8 @@
0.000176 },
{ 1.0, 0.0513, 0.00998, 0.00312, 0.0013, 0.000663, 0.000394, 0.000264, 0.000194, 0.000155, 0.000132,
0.000118, 0.000111, 0.000109 },
- { 1.0, 0.0488, 0.00906, 0.0027, 0.00108, 0.00053, 0.000303, 0.000196, 0.00014, 0.000108, 8.89e-05,
- 7.77e-05, 7.12e-05, 6.79e-05, 6.71e-05 } // 20
+ { 1.0, 0.0488, 0.00906, 0.0027, 0.00108, 0.00053, 0.000303, 0.000196, 0.00014, 0.000108, 8.89e-05, 7.77e-05,
+ 7.12e-05, 6.79e-05, 6.71e-05 } // 20
}; // the first column is a dummy column representing K=0.
/**
@@ -147,7 +147,8 @@
// we allocate one more bucket per element to compensate for the effect introduced by using a blocked bloom filter
// a detailed analysis can be found at https://dl.acm.org/citation.cfm?id=1594230
- return new BloomFilterSpecification(K, bucketsPerElement + 1); }
+ return new BloomFilterSpecification(K, bucketsPerElement + 1);
+ }
/**
* Calculates the maximum number of buckets per element that this implementation
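For context on the table and the extra bucket above: with k hash functions and c buckets per element, the textbook false-positive estimate is (1 - e^(-k/c))^k; the in-tree table stores refined per-(c, k) values, and the +1 bucket compensates for the blocked layout per the cited paper. A sketch of the textbook formula:

public class BloomFalsePositive {
    // Standard Bloom-filter approximation, independent of Hyracks internals:
    // with k hash functions and c buckets (bits) per element, the expected
    // false-positive rate is (1 - e^(-k/c))^k.
    static double falsePositiveRate(int k, double bucketsPerElement) {
        return Math.pow(1.0 - Math.exp(-k / bucketsPerElement), k);
    }

    public static void main(String[] args) {
        // At 10 buckets per element and k = 7, the estimate is about 0.0082;
        // the in-tree table holds empirically refined values of the same shape.
        System.out.printf("%.5f%n", falsePositiveRate(7, 10.0));
    }
}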
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
index 499a01a..86497a8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
@@ -228,8 +228,8 @@
} else {
// segment has enough tuples: compress segment, extract prefix,
// write prefix tuple to buffer, and set prefix slot
- newPrefixSlots[newPrefixSlots.length - 1 - prefixTupleIndex] = slotManager
- .encodeSlotFields(fieldCountToCompress, prefixFreeSpace);
+ newPrefixSlots[newPrefixSlots.length - 1 - prefixTupleIndex] =
+ slotManager.encodeSlotFields(fieldCountToCompress, prefixFreeSpace);
prefixFreeSpace += tupleWriter.writeTupleFields(prevTuple, 0, fieldCountToCompress,
byteBuffer.array(), prefixFreeSpace);
@@ -237,8 +237,8 @@
for (int j = 0; j < tuplesInSegment; j++) {
int currTupleIndex = segmentStart + j;
tupleToWrite.resetByTupleIndex(frame, currTupleIndex);
- newTupleSlots[tupleCount - 1 - currTupleIndex] = slotManager.encodeSlotFields(
- prefixTupleIndex, tupleFreeSpace);
+ newTupleSlots[tupleCount - 1 - currTupleIndex] =
+ slotManager.encodeSlotFields(prefixTupleIndex, tupleFreeSpace);
tupleFreeSpace += tupleWriter.writeTupleFields(tupleToWrite, fieldCountToCompress,
fieldCount - fieldCountToCompress, byteBuffer.array(), tupleFreeSpace);
}
@@ -257,16 +257,16 @@
} else {
// just write the tuple uncompressed
tupleToWrite.resetByTupleIndex(frame, tupleIndex);
- newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(
- FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
+ newTupleSlots[tupleCount - 1 - tupleIndex] =
+ slotManager.encodeSlotFields(FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
uncompressedTupleCount++;
}
} else {
// just write the tuple uncompressed
tupleToWrite.resetByTupleIndex(frame, tupleIndex);
- newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(
- FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
+ newTupleSlots[tupleCount - 1 - tupleIndex] =
+ slotManager.encodeSlotFields(FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
uncompressedTupleCount++;
}
@@ -282,15 +282,16 @@
// this can happen due to the greedy solution of the knapsack-like problem
// therefore, we check if the new space exceeds the page size to avoid the only danger of
// an increasing space
- int totalSpace = tupleFreeSpace + newTupleSlots.length * slotManager.getSlotSize() + newPrefixSlots.length
- * slotManager.getSlotSize();
+ int totalSpace = tupleFreeSpace + newTupleSlots.length * slotManager.getSlotSize()
+ + newPrefixSlots.length * slotManager.getSlotSize();
if (totalSpace > buf.capacity())
// just leave the page as is
return false;
// copy new tuple and new slots into original page
int freeSpaceAfterInit = frame.getOrigFreeSpaceOff();
- System.arraycopy(buffer, freeSpaceAfterInit, pageArray, freeSpaceAfterInit, tupleFreeSpace - freeSpaceAfterInit);
+ System.arraycopy(buffer, freeSpaceAfterInit, pageArray, freeSpaceAfterInit,
+ tupleFreeSpace - freeSpaceAfterInit);
// copy prefix slots
int slotOffRunner = buf.capacity() - slotManager.getSlotSize();
@@ -363,9 +364,8 @@
kp.pmi[j].matches++;
int prefixBytes = tupleWriter.bytesRequired(tuple, 0, prefixFieldsMatch);
- int spaceBenefit = tupleWriter.bytesRequired(tuple)
- - tupleWriter.bytesRequired(tuple, prefixFieldsMatch, tuple.getFieldCount()
- - prefixFieldsMatch);
+ int spaceBenefit = tupleWriter.bytesRequired(tuple) - tupleWriter.bytesRequired(tuple,
+ prefixFieldsMatch, tuple.getFieldCount() - prefixFieldsMatch);
if (kp.pmi[j].matches == occurrenceThreshold) {
// if we compress this prefix, we pay the cost of storing it once, plus
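The truncated comment above is doing space accounting: a shared prefix is stored once (plus slot overhead), and every tuple that references it saves the difference between its full size and its suffix size. A sketch of that accounting with hypothetical byte counts:

public class PrefixCompressionBenefit {
    // Hypothetical sizes, sketching the trade-off in FieldPrefixCompressor:
    // storing a shared prefix once costs prefixBytes (plus a prefix slot),
    // while every tuple referencing it saves (fullBytes - suffixBytes).
    static int netSaving(int matches, int fullBytes, int suffixBytes, int prefixBytes, int slotSize) {
        int perTupleBenefit = fullBytes - suffixBytes;
        return matches * perTupleBenefit - (prefixBytes + slotSize);
    }

    public static void main(String[] args) {
        // Compressing pays off once enough tuples share the prefix:
        // 2 * (64 - 24) - (40 + 4) = 36 bytes saved.
        System.out.println(netSaving(2, 64, 24, 40, 4) > 0); // true
    }
}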
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
index 8b64318..cd0a2d3 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
@@ -57,7 +57,8 @@
}
// returns prefix slot number, or TUPLE_UNCOMPRESSED if no match was found
- public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple) throws HyracksDataException {
+ public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple)
+ throws HyracksDataException {
int prefixMid;
int prefixBegin = 0;
int prefixEnd = frame.getPrefixTupleCount() - 1;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IModificationOperationCallbackFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IModificationOperationCallbackFactory.java
index d0f2358..cedd764 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IModificationOperationCallbackFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IModificationOperationCallbackFactory.java
@@ -28,7 +28,6 @@
@FunctionalInterface
public interface IModificationOperationCallbackFactory extends Serializable {
- IModificationOperationCallback createModificationOperationCallback(LocalResource resource,
- IHyracksTaskContext ctx, IOperatorNodePushable operatorNodePushable)
- throws HyracksDataException;
+ IModificationOperationCallback createModificationOperationCallback(LocalResource resource, IHyracksTaskContext ctx,
+ IOperatorNodePushable operatorNodePushable) throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ISearchOperationCallbackFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ISearchOperationCallbackFactory.java
index 0a5eacc..c8020fc 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ISearchOperationCallbackFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ISearchOperationCallbackFactory.java
@@ -26,6 +26,6 @@
import org.apache.hyracks.storage.common.ISearchOperationCallback;
public interface ISearchOperationCallbackFactory extends Serializable {
- public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx, IOperatorNodePushable operatorNodePushable)
- throws HyracksDataException;
+ public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx,
+ IOperatorNodePushable operatorNodePushable) throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexAccessor.java
index 3f8b6c1..90963bf 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexAccessor.java
@@ -45,6 +45,5 @@
* @throws HyracksDataException
* If the BufferCache throws while un/pinning or un/latching.
*/
- public void diskOrderScan(ITreeIndexCursor cursor)
- throws HyracksDataException;
+ public void diskOrderScan(ITreeIndexCursor cursor) throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexCreateOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexCreateOperatorDescriptor.java
index badcf27..898321b 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexCreateOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexCreateOperatorDescriptor.java
@@ -32,8 +32,7 @@
private static final long serialVersionUID = 1L;
private final IIndexBuilderFactory indexBuilderFactory;
- public IndexCreateOperatorDescriptor(IOperatorDescriptorRegistry spec,
- IIndexBuilderFactory indexBuilderFactory) {
+ public IndexCreateOperatorDescriptor(IOperatorDescriptorRegistry spec, IIndexBuilderFactory indexBuilderFactory) {
super(spec, 0, 0);
this.indexBuilderFactory = indexBuilderFactory;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
index f75144a..aae830d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
@@ -87,7 +87,7 @@
return;
}
if (canRetry(e)) {
- LOGGER.info( "Retrying drop on exception", e);
+ LOGGER.info("Retrying drop on exception", e);
continue;
}
throw e;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
index f67424e..d55962a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
@@ -181,8 +181,8 @@
ITupleReference tuple = cursor.getTuple();
writeTupleToOutput(tuple);
if (appendIndexFilter) {
- writeFilterTupleToOutput(((ILSMIndexCursor)cursor).getFilterMinTuple());
- writeFilterTupleToOutput(((ILSMIndexCursor)cursor).getFilterMaxTuple());
+ writeFilterTupleToOutput(((ILSMIndexCursor) cursor).getFilterMinTuple());
+ writeFilterTupleToOutput(((ILSMIndexCursor) cursor).getFilterMaxTuple());
}
FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/AbstractSlotManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/AbstractSlotManager.java
index 92ba631..70333ac 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/AbstractSlotManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/AbstractSlotManager.java
@@ -42,8 +42,7 @@
@Override
public int getSlotEndOff() {
- return frame.getBuffer().capacity()
- - (frame.getTupleCount() * slotSize);
+ return frame.getBuffer().capacity() - (frame.getTupleCount() * slotSize);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
index de890c4..d0757c8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
@@ -76,8 +76,8 @@
@Override
public int getSpace() {
- return buf.array().length - buf.getInt(Constants.FREE_SPACE_OFFSET) - (Integer.BYTES * buf.getInt(
- FREE_PAGE_COUNT_OFFSET));
+ return buf.array().length - buf.getInt(Constants.FREE_SPACE_OFFSET)
+ - (Integer.BYTES * buf.getInt(FREE_PAGE_COUNT_OFFSET));
}
@Override
@@ -209,8 +209,8 @@
private boolean isInner(IValueReference key, int tupleOffset) {
int keySize = buf.getInt(tupleOffset);
if (keySize == key.getLength()) {
- return LIFOMetaDataFrame.compare(key.getByteArray(), key.getStartOffset(), buf.array(), tupleOffset
- + Integer.BYTES, keySize) == 0;
+ return LIFOMetaDataFrame.compare(key.getByteArray(), key.getStartOffset(), buf.array(),
+ tupleOffset + Integer.BYTES, keySize) == 0;
}
return false;
}
@@ -253,8 +253,8 @@
int available = getSpace();
int required = key.getLength() + Integer.BYTES + Integer.BYTES + value.getLength();
if (available < required) {
- throw new HyracksDataException("Available space in the page ("
- + available + ") is not enough to store the key value pair(" + required + ")");
+ throw new HyracksDataException("Available space in the page (" + available
+ + ") is not enough to store the key value pair(" + required + ")");
}
buf.putInt(offset, key.getLength());
offset += Integer.BYTES;
@@ -294,14 +294,14 @@
@Override
public String toString() {
- StringBuilder aString = new StringBuilder(this.getClass().getSimpleName()).append('\n').
- append("Tuple Count: " + getTupleCount()).append('\n').
- append("Free Space offset: " + buf.getInt(Constants.FREE_SPACE_OFFSET)).append('\n').
- append("Level: " + buf.get(Constants.LEVEL_OFFSET)).append('\n').
- append("Version: " + buf.getInt(STORAGE_VERSION_OFFSET)).append('\n').
- append("Max Page: " + buf.getInt(MAX_PAGE_OFFSET)).append('\n').
- append("Root Page: " + buf.getInt(ROOT_PAGE_OFFSET)).append('\n').
- append("Number of free pages: " + buf.getInt(FREE_PAGE_COUNT_OFFSET));
+ StringBuilder aString = new StringBuilder(this.getClass().getSimpleName()).append('\n')
+ .append("Tuple Count: " + getTupleCount()).append('\n')
+ .append("Free Space offset: " + buf.getInt(Constants.FREE_SPACE_OFFSET)).append('\n')
+ .append("Level: " + buf.get(Constants.LEVEL_OFFSET)).append('\n')
+ .append("Version: " + buf.getInt(STORAGE_VERSION_OFFSET)).append('\n')
+ .append("Max Page: " + buf.getInt(MAX_PAGE_OFFSET)).append('\n')
+ .append("Root Page: " + buf.getInt(ROOT_PAGE_OFFSET)).append('\n')
+ .append("Number of free pages: " + buf.getInt(FREE_PAGE_COUNT_OFFSET));
int tupleCount = getTupleCount();
int offset;
for (int i = 0; i < tupleCount; i++) {
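The space check above implies the key-value layout is [keyLen:int][key][valueLen:int][value], so the required space is both lengths plus two int headers; that layout is inferred from the hunk, not confirmed by it. A sketch under that assumption:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class MetaFramePut {
    public static void main(String[] args) {
        byte[] key = "maxPage".getBytes(StandardCharsets.UTF_8); // illustrative key
        byte[] value = new byte[Long.BYTES];
        // Same arithmetic as the hunk: payloads plus two int length headers.
        int required = key.length + Integer.BYTES + Integer.BYTES + value.length;
        ByteBuffer buf = ByteBuffer.allocate(64);
        buf.putInt(key.length).put(key).putInt(value.length).put(value);
        System.out.println(required == buf.position()); // true
    }
}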
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/AppendOnlyLinkedMetadataPageManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/AppendOnlyLinkedMetadataPageManager.java
index a051364..5c389d2 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/AppendOnlyLinkedMetadataPageManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/AppendOnlyLinkedMetadataPageManager.java
@@ -345,10 +345,8 @@
try {
frame.setPage(page);
int inPageOffset = frame.getOffset(key);
- return inPageOffset >= 0
- ? ((long) pageId * bufferCache.getPageSizeWithHeader()) + frame.getOffset(key)
- + IBufferCache.RESERVED_HEADER_BYTES
- : -1L;
+ return inPageOffset >= 0 ? ((long) pageId * bufferCache.getPageSizeWithHeader()) + frame.getOffset(key)
+ + IBufferCache.RESERVED_HEADER_BYTES : -1L;
} finally {
page.releaseReadLatch();
unpinPage(page);
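The rewrapped ternary above computes a file offset as pageId * pageSizeWithHeader + the key's in-page offset + the reserved header bytes. A sketch of that arithmetic with illustrative constants:

public class PageFileOffset {
    // Each on-disk page occupies pageSize + header bytes, and the key's
    // in-page offset is shifted past the page's reserved header.
    static long fileOffset(long pageId, int pageSizeWithHeader, int inPageOffset, int reservedHeaderBytes) {
        return pageId * pageSizeWithHeader + inPageOffset + reservedHeaderBytes;
    }

    public static void main(String[] args) {
        // 3 * (4096 + 64) + 128 + 64 = 12672; all values illustrative.
        System.out.println(fileOffset(3, 4096 + 64, 128, 64));
    }
}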
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/LinkedMetaDataPageManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/LinkedMetaDataPageManager.java
index d8afd12..951d824 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/LinkedMetaDataPageManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/LinkedMetaDataPageManager.java
@@ -240,8 +240,8 @@
@Override
public void close() throws HyracksDataException {
if (ready) {
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, getMetadataPageId()),
- false);
+ ICachedPage metaNode =
+ bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, getMetadataPageId()), false);
ITreeIndexMetadataFrame metaFrame = frameFactory.createFrame();
metaNode.acquireWriteLatch();
try {
@@ -319,8 +319,8 @@
public long getFileOffset(ITreeIndexMetadataFrame frame, IValueReference key) throws HyracksDataException {
int metadataPageNum = getMetadataPageId();
if (metadataPageNum != IBufferCache.INVALID_PAGEID) {
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, getMetadataPageId()),
- false);
+ ICachedPage metaNode =
+ bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, getMetadataPageId()), false);
metaNode.acquireReadLatch();
try {
frame.setPage(metaNode);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallback.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallback.java
index 8245338..15aba57 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallback.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallback.java
@@ -27,7 +27,7 @@
/**
* Dummy operation callback that simply does nothing.
*/
-public enum NoOpOperationCallback implements IModificationOperationCallback,ISearchOperationCallback {
+public enum NoOpOperationCallback implements IModificationOperationCallback, ISearchOperationCallback {
INSTANCE;
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallbackFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallbackFactory.java
index 4ef89d1..925642e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallbackFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallbackFactory.java
@@ -36,7 +36,8 @@
INSTANCE;
@Override
- public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx, IOperatorNodePushable operatorNodePushable) {
+ public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx,
+ IOperatorNodePushable operatorNodePushable) {
return NoOpOperationCallback.INSTANCE;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleMode.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleMode.java
index ce2aa38..b192de7 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleMode.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleMode.java
@@ -20,5 +20,8 @@
package org.apache.hyracks.storage.am.common.ophelpers;
public enum FindTupleMode {
- INCLUSIVE, EXCLUSIVE, EXCLUSIVE_ERROR_IF_EXISTS, EXACT
+ INCLUSIVE,
+ EXCLUSIVE,
+ EXCLUSIVE_ERROR_IF_EXISTS,
+ EXACT
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
index e9dfaff..2081122 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
@@ -20,5 +20,7 @@
package org.apache.hyracks.storage.am.common.ophelpers;
public enum FindTupleNoExactMatchPolicy {
- LOWER_KEY, HIGHER_KEY, NONE
+ LOWER_KEY,
+ HIGHER_KEY,
+ NONE
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/tuples/SimpleTupleReference.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/tuples/SimpleTupleReference.java
index 609c51a..e82b037 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/tuples/SimpleTupleReference.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/tuples/SimpleTupleReference.java
@@ -98,7 +98,7 @@
@Override
public int getTupleSize() {
- return nullFlagsBytes + fieldSlotsBytes + ShortPointable.getShort(buf, tupleStartOff + nullFlagsBytes
- + (fieldCount - 1) * 2);
+ return nullFlagsBytes + fieldSlotsBytes
+ + ShortPointable.getShort(buf, tupleStartOff + nullFlagsBytes + (fieldCount - 1) * 2);
}
}
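The rewrapped getTupleSize() above implies the tuple layout: null-flag bytes, then one two-byte end-offset slot per field, then the field data, with the last slot holding the total data length. A sketch with illustrative sizes (ByteBuffer standing in for ShortPointable):

import java.nio.ByteBuffer;

public class SimpleTupleSize {
    public static void main(String[] args) {
        int fieldCount = 2;
        int nullFlagsBytes = 1;               // ceil(fieldCount / 8)
        int fieldSlotsBytes = fieldCount * 2; // one short end-offset per field
        ByteBuffer buf = ByteBuffer.allocate(64);
        int tupleStartOff = 0;
        buf.putShort(tupleStartOff + nullFlagsBytes, (short) 4);      // field 0 data ends at 4
        buf.putShort(tupleStartOff + nullFlagsBytes + 2, (short) 10); // field 1 data ends at 10
        // Same expression shape as getTupleSize(): headers plus last end-offset.
        int tupleSize = nullFlagsBytes + fieldSlotsBytes
                + buf.getShort(tupleStartOff + nullFlagsBytes + (fieldCount - 1) * 2);
        System.out.println(tupleSize); // 15
    }
}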
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexStats.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexStats.java
index 4ee9f53..ad0b030 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexStats.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexStats.java
@@ -75,41 +75,30 @@
strBuilder.append("TREE LEVELS: " + treeLevels + "\n");
strBuilder.append("FREE PAGES : " + freePages + "\n");
strBuilder.append("META PAGES : " + metaPages + "\n");
- long totalPages = interiorStats.getNumPages() + leafStats.getNumPages()
- + freePages + metaPages;
+ long totalPages = interiorStats.getNumPages() + leafStats.getNumPages() + freePages + metaPages;
strBuilder.append("TOTAL PAGES : " + totalPages + "\n");
strBuilder.append("\n");
strBuilder.append("ROOT STATS" + "\n");
- strBuilder
- .append("NUM TUPLES: " + rootStats.getNumTuples() + "\n");
- strBuilder.append("FILL FACTOR : "
- + df.format(rootStats.getAvgFillFactor()) + "\n");
+ strBuilder.append("NUM TUPLES: " + rootStats.getNumTuples() + "\n");
+ strBuilder.append("FILL FACTOR : " + df.format(rootStats.getAvgFillFactor()) + "\n");
if (interiorStats.getNumPages() > 0) {
strBuilder.append("\n");
strBuilder.append("INTERIOR STATS" + "\n");
- strBuilder.append("NUM PAGES: " + interiorStats.getNumPages()
- + "\n");
- strBuilder.append("NUM TUPLES: "
- + interiorStats.getNumTuples() + "\n");
- strBuilder.append("AVG TUPLES/PAGE: "
- + df.format(interiorStats.getAvgNumTuples()) + "\n");
- strBuilder.append("AVG FILL FACTOR: "
- + df.format(interiorStats.getAvgFillFactor()) + "\n");
+ strBuilder.append("NUM PAGES: " + interiorStats.getNumPages() + "\n");
+ strBuilder.append("NUM TUPLES: " + interiorStats.getNumTuples() + "\n");
+ strBuilder.append("AVG TUPLES/PAGE: " + df.format(interiorStats.getAvgNumTuples()) + "\n");
+ strBuilder.append("AVG FILL FACTOR: " + df.format(interiorStats.getAvgFillFactor()) + "\n");
}
if (leafStats.getNumPages() > 0) {
strBuilder.append("\n");
strBuilder.append("LEAF STATS" + "\n");
- strBuilder.append("NUM PAGES: "
- + df.format(leafStats.getNumPages()) + "\n");
- strBuilder.append("NUM TUPLES: "
- + df.format(leafStats.getNumTuples()) + "\n");
- strBuilder.append("AVG TUPLES/PAGE: "
- + df.format(leafStats.getAvgNumTuples()) + "\n");
- strBuilder.append("AVG FILL FACTOR: "
- + df.format(leafStats.getAvgFillFactor()) + "\n");
+ strBuilder.append("NUM PAGES: " + df.format(leafStats.getNumPages()) + "\n");
+ strBuilder.append("NUM TUPLES: " + df.format(leafStats.getNumTuples()) + "\n");
+ strBuilder.append("AVG TUPLES/PAGE: " + df.format(leafStats.getAvgNumTuples()) + "\n");
+ strBuilder.append("AVG FILL FACTOR: " + df.format(leafStats.getAvgFillFactor()) + "\n");
}
return strBuilder.toString();
@@ -130,8 +119,7 @@
public void add(ITreeIndexFrame frame) {
numPages++;
numTuples += frame.getTupleCount();
- sumFillFactors += (double) (frame.getBuffer().capacity() - frame
- .getTotalFreeSpace())
+ sumFillFactors += (double) (frame.getBuffer().capacity() - frame.getTotalFreeSpace())
/ (double) frame.getBuffer().capacity();
}
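The rewrapped accumulation above computes a running average fill factor: each visited page contributes its used fraction of capacity, and the average divides by the page count. A minimal sketch:

public class FillFactorStats {
    private int numPages;
    private double sumFillFactors;

    // Mirrors the accumulation above: used fraction = (capacity - free) / capacity.
    void add(int capacity, int totalFreeSpace) {
        numPages++;
        sumFillFactors += (double) (capacity - totalFreeSpace) / capacity;
    }

    double avgFillFactor() {
        return numPages == 0 ? 0.0 : sumFillFactors / numPages;
    }

    public static void main(String[] args) {
        FillFactorStats s = new FillFactorStats();
        s.add(4096, 1024); // 75% full
        s.add(4096, 2048); // 50% full
        System.out.println(s.avgFillFactor()); // 0.625
    }
}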
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexUtils.java
index 37ecfc0..ee31aaf 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexUtils.java
@@ -27,7 +27,8 @@
@SuppressWarnings("rawtypes")
public class TreeIndexUtils {
- public static String printFrameTuples(ITreeIndexFrame frame, ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
+ public static String printFrameTuples(ITreeIndexFrame frame, ISerializerDeserializer[] fieldSerdes)
+ throws HyracksDataException {
StringBuilder strBuilder = new StringBuilder();
ITreeIndexTupleReference tuple = frame.createTupleReference();
for (int i = 0; i < frame.getTupleCount(); i++) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/test/java/org/apache/hyracks/storage/am/common/frames/LIFOMetadataFrameTest.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/test/java/org/apache/hyracks/storage/am/common/frames/LIFOMetadataFrameTest.java
index fbb930d..0df558f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/test/java/org/apache/hyracks/storage/am/common/frames/LIFOMetadataFrameTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/test/java/org/apache/hyracks/storage/am/common/frames/LIFOMetadataFrameTest.java
@@ -41,8 +41,7 @@
Assert.assertNull(longPointable.getByteArray());
byte[] longBytes = new byte[Long.BYTES];
MutableArrayValueReference value = new MutableArrayValueReference(longBytes);
- int space = frame.getSpace() - (value.getLength() + Integer.BYTES * 2
- + testKey.getLength());
+ int space = frame.getSpace() - (value.getLength() + Integer.BYTES * 2 + testKey.getLength());
for (long l = 1L; l < 52L; l++) {
LongPointable.setLong(longBytes, 0, l);
frame.put(testKey, value);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
index 482ad38..0593ad5 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
@@ -364,8 +364,7 @@
public LSMBTreeOpContext createOpContext(IModificationOperationCallback modificationCallback,
ISearchOperationCallback searchCallback) {
int numBloomFilterKeyFields = hasBloomFilter
- ? ((LSMBTreeWithBloomFilterDiskComponentFactory) componentFactory).getBloomFilterKeyFields().length
- : 0;
+ ? ((LSMBTreeWithBloomFilterDiskComponentFactory) componentFactory).getBloomFilterKeyFields().length : 0;
return new LSMBTreeOpContext(this, memoryComponents, insertLeafFrameFactory, deleteLeafFrameFactory,
modificationCallback, searchCallback, numBloomFilterKeyFields, getTreeFields(), getFilterFields(),
getHarness(), getFilterCmpFactories(), tracer);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriter.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriter.java
index 44002ae..7c924b5 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriter.java
@@ -32,7 +32,7 @@
public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
int tupleSize = bytesRequired(tuple);
byte[] buf = tuple.getFieldData(0);
- int tupleStartOff = ((LSMBTreeTupleReference)tuple).getTupleStart();
+ int tupleStartOff = ((LSMBTreeTupleReference) tuple).getTupleStart();
System.arraycopy(buf, tupleStartOff, targetBuf, targetOff, tupleSize);
return tupleSize;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/freepage/VirtualFreePageManagerFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/freepage/VirtualFreePageManagerFactory.java
index 0d58b85..2ca162d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/freepage/VirtualFreePageManagerFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/freepage/VirtualFreePageManagerFactory.java
@@ -24,6 +24,7 @@
public class VirtualFreePageManagerFactory implements IPageManagerFactory {
private static final long serialVersionUID = 1L;
+
@Override
public IPageManager createPageManager(IBufferCache bufferCache) {
return new VirtualFreePageManager(bufferCache);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/FilterBulkLoader.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/FilterBulkLoader.java
index 7359d2b..625f81e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/FilterBulkLoader.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/FilterBulkLoader.java
@@ -35,9 +35,8 @@
protected final PermutingTupleReference filterTuple;
protected final MultiComparator filterCmp;
- public FilterBulkLoader(ILSMComponentFilter filter, ITreeIndex treeIndex,
- ILSMComponentFilterManager filterManager, int[] indexFields, int[] filterFields,
- MultiComparator filterCmp) {
+ public FilterBulkLoader(ILSMComponentFilter filter, ITreeIndex treeIndex, ILSMComponentFilterManager filterManager,
+ int[] indexFields, int[] filterFields, MultiComparator filterCmp) {
this.filter = filter;
this.treeIndex = treeIndex;
this.filterManager = filterManager;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
index b2c48e1..fa3093c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
@@ -298,9 +298,8 @@
if (!inactiveDiskComponents.isEmpty()) {
for (ILSMDiskComponent inactiveComp : inactiveDiskComponents) {
if (inactiveComp.getFileReferenceCount() == 1) {
- inactiveDiskComponentsToBeDeleted =
- inactiveDiskComponentsToBeDeleted == null ? new LinkedList<>()
- : inactiveDiskComponentsToBeDeleted;
+ inactiveDiskComponentsToBeDeleted = inactiveDiskComponentsToBeDeleted == null
+ ? new LinkedList<>() : inactiveDiskComponentsToBeDeleted;
inactiveDiskComponentsToBeDeleted.add(inactiveComp);
}
}
@@ -627,8 +626,8 @@
boolean failedOperation = false;
try {
newComponent = lsmIndex.merge(operation);
- operation.getCallback()
- .afterOperation(LSMIOOperationType.MERGE, ctx.getComponentHolder(), newComponent);
+ operation.getCallback().afterOperation(LSMIOOperationType.MERGE, ctx.getComponentHolder(),
+ newComponent);
newComponent.markAsValid(lsmIndex.isDurable());
} catch (Throwable e) { // NOSONAR: Log and re-throw
failedOperation = true;
@@ -808,8 +807,7 @@
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
if (LOGGER.isWarnEnabled()) {
- LOGGER.log(Level.WARN, "Ignoring interrupt while waiting for lagging merge on " + lsmIndex,
- e);
+ LOGGER.log(Level.WARN, "Ignoring interrupt while waiting for lagging merge on " + lsmIndex, e);
}
}
}
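An aside for orientation (this text is not in the change): the first LSMHarness hunk re-wraps a lazy-initialization idiom but leaves its behavior intact. A self-contained sketch of the idiom with hypothetical names, assuming the list should only be allocated once a first match is found:

    import java.util.LinkedList;
    import java.util.List;

    static List<String> collectInactive(List<String> candidates) {
        List<String> toDelete = null;
        for (String c : candidates) {
            if (c.startsWith("inactive")) {   // stand-in predicate for illustration
                // allocate lazily so the common empty case does no allocation
                toDelete = toDelete == null ? new LinkedList<>() : toDelete;
                toDelete.add(c);
            }
        }
        return toDelete;
    }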
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexOpContext.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexOpContext.java
index bd82822..9cfaf7a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexOpContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexOpContext.java
@@ -34,7 +34,7 @@
protected void setTokenizingTupleIterator() {
IBinaryTokenizer tokenizer = getTokenizerFactory().createTokenizer();
- setTupleIter(new PartitionedInvertedIndexTokenizingTupleIterator(tokenCmpFactories.length, btree.getFieldCount()
- - tokenCmpFactories.length, tokenizer));
+ setTupleIter(new PartitionedInvertedIndexTokenizingTupleIterator(tokenCmpFactories.length,
+ btree.getFieldCount() - tokenCmpFactories.length, tokenizer));
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListCursor.java
index 56520b8..94ce348 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListCursor.java
@@ -117,8 +117,8 @@
currentPageIx = binarySearch(elementIndexes, 0, numPages, elementIx);
if (currentPageIx < 0) {
- throw new IndexOutOfBoundsException("Requested index: " + elementIx + " from array with numElements: "
- + numElements);
+ throw new IndexOutOfBoundsException(
+ "Requested index: " + elementIx + " from array with numElements: " + numElements);
}
if (currentPageIx == 0) {
@@ -223,8 +223,8 @@
public String printCurrentElement(ISerializerDeserializer[] serdes) throws HyracksDataException {
StringBuilder strBuilder = new StringBuilder();
for (int i = 0; i < tuple.getFieldCount(); i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
DataInput dataIn = new DataInputStream(inStream);
Object o = serdes[i].deserialize(dataIn);
strBuilder.append(o.toString());
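For context only: the printCurrentElement hunk above re-wraps construction of a ByteArrayInputStream over one tuple field. The underlying pattern, byte range to stream to DataInput to deserializer, in a minimal sketch (readInt stands in here for a serde's deserialize call):

    import java.io.ByteArrayInputStream;
    import java.io.DataInput;
    import java.io.DataInputStream;
    import java.io.IOException;

    static int readIntAt(byte[] data, int start, int length) throws IOException {
        // expose only the field's byte range as a stream
        ByteArrayInputStream in = new ByteArrayInputStream(data, start, length);
        DataInput din = new DataInputStream(in);
        return din.readInt(); // serdes[i].deserialize(din) would read here instead
    }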
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ArrayListFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ArrayListFactory.java
index 84c453b..5846e25 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ArrayListFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ArrayListFactory.java
@@ -23,7 +23,7 @@
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IObjectFactory;
-public class ArrayListFactory<T> implements IObjectFactory<ArrayList<T>>{
+public class ArrayListFactory<T> implements IObjectFactory<ArrayList<T>> {
@Override
public ArrayList<T> create() {
return new ArrayList<T>();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveEditDistanceSearchModifier.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveEditDistanceSearchModifier.java
index 6116322..decc499 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveEditDistanceSearchModifier.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveEditDistanceSearchModifier.java
@@ -21,7 +21,6 @@
public class ConjunctiveEditDistanceSearchModifier extends EditDistanceSearchModifier {
-
public ConjunctiveEditDistanceSearchModifier(int gramLength, int edThresh) {
super(gramLength, edThresh);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
index 4269aa7..4c9f037 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
@@ -34,8 +34,7 @@
protected final ArrayList<IInvertedListCursor> invListCursors = new ArrayList<>();
- public TOccurrenceSearcher(IHyracksCommonContext ctx, IInPlaceInvertedIndex invIndex)
- throws HyracksDataException {
+ public TOccurrenceSearcher(IHyracksCommonContext ctx, IInPlaceInvertedIndex invIndex) throws HyracksDataException {
super(ctx, invIndex);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8Token.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8Token.java
index ccc2c81..ed2f3be 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8Token.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8Token.java
@@ -115,8 +115,7 @@
// The preChar and postChar are required to be a single byte utf8 char, e.g. ASCII char.
protected void serializeToken(UTF8StringBuilder builder, GrowableArray out, int numPreChars, int numPostChars,
- char preChar, char postChar)
- throws IOException {
+ char preChar, char postChar) throws IOException {
handleTokenTypeTag(out.getDataOutput());
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
index cd37ffa..3a5224c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
@@ -72,10 +72,11 @@
int tokenStart = tokensStart.get(i);
curTokenCount++; // assume we found it
int offset = 0;
- for (int charPos= 0; charPos < tokenLength; charPos++) {
+ for (int charPos = 0; charPos < tokenLength; charPos++) {
// case insensitive comparison
- if (Character.toLowerCase(UTF8StringUtil.charAt(sentenceBytes, currentTokenStart + offset))
- != Character.toLowerCase(UTF8StringUtil.charAt(sentenceBytes, tokenStart + offset))) {
+ if (Character.toLowerCase(
+ UTF8StringUtil.charAt(sentenceBytes, currentTokenStart + offset)) != Character
+ .toLowerCase(UTF8StringUtil.charAt(sentenceBytes, tokenStart + offset))) {
curTokenCount--;
break;
}
@@ -93,7 +94,6 @@
tokenCount++;
}
-
// TODO Why do we bother to get the tokenCount in advance? It seems to be the caller's problem.
@Override
public short getTokensCount() {
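Aside, not part of the change: both tokenizer hunks in this area re-wrap the same case-insensitive character comparison. A sketch over plain Java strings (String.charAt stands in for the codebase's UTF8StringUtil.charAt, which reads chars out of UTF-8 bytes):

    static boolean regionEqualsIgnoreCase(String s, int aStart, int bStart, int length) {
        for (int k = 0; k < length; k++) {
            if (Character.toLowerCase(s.charAt(aStart + k))
                    != Character.toLowerCase(s.charAt(bStart + k))) {
                return false;
            }
        }
        return true;
    }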
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
index 81254fc..e583c7d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
@@ -19,16 +19,15 @@
package org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers;
-public class DelimitedUTF8StringBinaryTokenizerFactory implements
- IBinaryTokenizerFactory {
+public class DelimitedUTF8StringBinaryTokenizerFactory implements IBinaryTokenizerFactory {
private static final long serialVersionUID = 1L;
private final boolean ignoreTokenCount;
private final boolean sourceHasTypeTag;
private final ITokenFactory tokenFactory;
- public DelimitedUTF8StringBinaryTokenizerFactory(boolean ignoreTokenCount,
- boolean sourceHasTypeTag, ITokenFactory tokenFactory) {
+ public DelimitedUTF8StringBinaryTokenizerFactory(boolean ignoreTokenCount, boolean sourceHasTypeTag,
+ ITokenFactory tokenFactory) {
this.ignoreTokenCount = ignoreTokenCount;
this.sourceHasTypeTag = sourceHasTypeTag;
this.tokenFactory = tokenFactory;
@@ -36,7 +35,6 @@
@Override
public IBinaryTokenizer createTokenizer() {
- return new DelimitedUTF8StringBinaryTokenizer(ignoreTokenCount,
- sourceHasTypeTag, tokenFactory);
+ return new DelimitedUTF8StringBinaryTokenizer(ignoreTokenCount, sourceHasTypeTag, tokenFactory);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java
index 8bd0c50..711a82f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java
@@ -79,8 +79,9 @@
tokenCount++; // assume found
int offset = 0;
for (int j = 0; j < gramLength; j++) {
- if (Character.toLowerCase(UTF8StringUtil.charAt(sentenceBytes, currentTokenStart + offset))
- != Character.toLowerCase(UTF8StringUtil.charAt(sentenceBytes, tmpIndex + offset))) {
+ if (Character
+ .toLowerCase(UTF8StringUtil.charAt(sentenceBytes, currentTokenStart + offset)) != Character
+ .toLowerCase(UTF8StringUtil.charAt(sentenceBytes, tmpIndex + offset))) {
tokenCount--;
break;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexTokenizingTupleIterator.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexTokenizingTupleIterator.java
index 2ade6db..78c8e4d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexTokenizingTupleIterator.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexTokenizingTupleIterator.java
@@ -39,7 +39,8 @@
protected final IBinaryTokenizer tokenizer;
protected ITupleReference inputTuple;
- public InvertedIndexTokenizingTupleIterator(int tokensFieldCount, int invListFieldCount, IBinaryTokenizer tokenizer) {
+ public InvertedIndexTokenizingTupleIterator(int tokensFieldCount, int invListFieldCount,
+ IBinaryTokenizer tokenizer) {
this.invListFieldCount = invListFieldCount;
this.tupleBuilder = new ArrayTupleBuilder(tokensFieldCount + invListFieldCount);
this.tupleReference = new ArrayTupleReference();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeAbstractCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeAbstractCursor.java
index a477baf..e4267e2 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeAbstractCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeAbstractCursor.java
@@ -175,7 +175,6 @@
open = false;
}
-
@Override
public ITupleReference getTuple() {
return frameTuple;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IGenericPrimitiveSerializerDeserializer.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IGenericPrimitiveSerializerDeserializer.java
index 1825003..1ca75cb 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IGenericPrimitiveSerializerDeserializer.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IGenericPrimitiveSerializerDeserializer.java
@@ -21,7 +21,6 @@
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
-public interface IGenericPrimitiveSerializerDeserializer<T> extends
- ISerializerDeserializer<T> {
+public interface IGenericPrimitiveSerializerDeserializer<T> extends ISerializerDeserializer<T> {
public double getValue(byte[] bytes, int offset);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RStarTreePolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RStarTreePolicy.java
index 7ae616a..1681eee 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RStarTreePolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RStarTreePolicy.java
@@ -71,10 +71,12 @@
@Override
public void split(ITreeIndexFrame leftFrame, ByteBuffer buf, ITreeIndexFrame rightFrame, ISlotManager slotManager,
- ITreeIndexTupleReference frameTuple, ITupleReference tuple, ISplitKey splitKey) throws HyracksDataException {
+ ITreeIndexTupleReference frameTuple, ITupleReference tuple, ISplitKey splitKey)
+ throws HyracksDataException {
RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
RTreeTypeAwareTupleWriter rTreeTupleWriterleftRTreeFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
- RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
+ RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame =
+ ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
RTreeNSMFrame leftRTreeFrame = ((RTreeNSMFrame) leftFrame);
@@ -92,10 +94,10 @@
for (int k = 0; k < leftRTreeFrame.getTupleCount(); ++k) {
frameTuple.resetByTupleIndex(leftRTreeFrame, k);
- double LowerKey = keyValueProviders[i]
- .getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
- double UpperKey = keyValueProviders[j]
- .getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
+ double LowerKey =
+ keyValueProviders[i].getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
+ double UpperKey =
+ keyValueProviders[j].getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
tupleEntries1.add(k, LowerKey);
tupleEntries2.add(k, UpperKey);
@@ -186,8 +188,8 @@
if (tupleEntries1.get(i).getTupleIndex() != -1) {
frameTuple.resetByTupleIndex(leftRTreeFrame, tupleEntries1.get(i).getTupleIndex());
rightFrame.insert(frameTuple, -1);
- ((UnorderedSlotManager) slotManager).modifySlot(
- slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
+ ((UnorderedSlotManager) slotManager)
+ .modifySlot(slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
totalBytes += leftRTreeFrame.getTupleSize(frameTuple);
numOfDeletedTuples++;
} else {
@@ -198,8 +200,8 @@
((UnorderedSlotManager) slotManager).deleteEmptySlots();
// maintain space information
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
- + (slotManager.getSlotSize() * numOfDeletedTuples));
+ buf.putInt(totalFreeSpaceOff,
+ buf.getInt(totalFreeSpaceOff) + totalBytes + (slotManager.getSlotSize() * numOfDeletedTuples));
// compact both pages
rightFrame.compact();
@@ -238,8 +240,8 @@
tupleEntries2.clear();
}
- public void generateDist(ITreeIndexFrame leftRTreeFrame, ITreeIndexTupleReference frameTuple,
- ITupleReference tuple, TupleEntryArrayList entries, Rectangle rec, int start, int end) {
+ public void generateDist(ITreeIndexFrame leftRTreeFrame, ITreeIndexTupleReference frameTuple, ITupleReference tuple,
+ TupleEntryArrayList entries, Rectangle rec, int start, int end) {
int j = 0;
while (entries.get(j).getTupleIndex() == -1) {
j++;
@@ -302,20 +304,19 @@
int c = ((RTreeNSMInteriorFrame) frame).pointerCmp(frameTuple, cmpFrameTuple, cmp);
if (c != 0) {
- double intersection = RTreeComputationUtils.overlappedArea(frameTuple, tuple,
- cmpFrameTuple, cmp, keyValueProviders);
+ double intersection = RTreeComputationUtils.overlappedArea(frameTuple, tuple, cmpFrameTuple,
+ cmp, keyValueProviders);
if (intersection != 0.0) {
- difference += intersection
- - RTreeComputationUtils.overlappedArea(frameTuple, null, cmpFrameTuple, cmp,
- keyValueProviders);
+ difference += intersection - RTreeComputationUtils.overlappedArea(frameTuple, null,
+ cmpFrameTuple, cmp, keyValueProviders);
}
} else {
id = j;
}
}
- double enlargedArea = RTreeComputationUtils.enlargedArea(cmpFrameTuple, tuple, cmp,
- keyValueProviders);
+ double enlargedArea =
+ RTreeComputationUtils.enlargedArea(cmpFrameTuple, tuple, cmp, keyValueProviders);
if (difference < minOverlap) {
minOverlap = difference;
minEnlargedArea = enlargedArea;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeComputationUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeComputationUtils.java
index 31ade3c..281dff5 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeComputationUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeComputationUtils.java
@@ -71,8 +71,8 @@
double pHigh1, pLow1;
if (tupleToBeInserted != null) {
int c = cmp.getComparators()[i].compare(tuple1.getFieldData(i), tuple1.getFieldStart(i),
- tuple1.getFieldLength(i), tupleToBeInserted.getFieldData(i),
- tupleToBeInserted.getFieldStart(i), tupleToBeInserted.getFieldLength(i));
+ tuple1.getFieldLength(i), tupleToBeInserted.getFieldData(i), tupleToBeInserted.getFieldStart(i),
+ tupleToBeInserted.getFieldLength(i));
if (c < 0) {
pLow1 = keyValueProviders[i].getValue(tuple1.getFieldData(i), tuple1.getFieldStart(i));
} else {
@@ -81,8 +81,8 @@
}
c = cmp.getComparators()[j].compare(tuple1.getFieldData(j), tuple1.getFieldStart(j),
- tuple1.getFieldLength(j), tupleToBeInserted.getFieldData(j),
- tupleToBeInserted.getFieldStart(j), tupleToBeInserted.getFieldLength(j));
+ tuple1.getFieldLength(j), tupleToBeInserted.getFieldData(j), tupleToBeInserted.getFieldStart(j),
+ tupleToBeInserted.getFieldLength(j));
if (c > 0) {
pHigh1 = keyValueProviders[j].getValue(tuple1.getFieldData(j), tuple1.getFieldStart(j));
} else {
@@ -124,16 +124,16 @@
int maxFieldPos = cmp.getKeyFieldCount() / 2;
for (int i = 0; i < maxFieldPos; i++) {
int j = maxFieldPos + i;
- int c = cmp.getComparators()[i]
- .compare(tuple1.getFieldData(i), tuple1.getFieldStart(i), tuple1.getFieldLength(i),
- tuple2.getFieldData(i), tuple2.getFieldStart(i), tuple2.getFieldLength(i));
+ int c = cmp.getComparators()[i].compare(tuple1.getFieldData(i), tuple1.getFieldStart(i),
+ tuple1.getFieldLength(i), tuple2.getFieldData(i), tuple2.getFieldStart(i),
+ tuple2.getFieldLength(i));
if (c > 0) {
return false;
}
- c = cmp.getComparators()[j]
- .compare(tuple1.getFieldData(j), tuple1.getFieldStart(j), tuple1.getFieldLength(j),
- tuple2.getFieldData(j), tuple2.getFieldStart(j), tuple2.getFieldLength(j));
+ c = cmp.getComparators()[j].compare(tuple1.getFieldData(j), tuple1.getFieldStart(j),
+ tuple1.getFieldLength(j), tuple2.getFieldData(j), tuple2.getFieldStart(j),
+ tuple2.getFieldLength(j));
if (c < 0) {
return false;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
index ba542ea..05d04f6 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
@@ -115,8 +115,7 @@
@Override
public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey,
- IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache)
- throws HyracksDataException {
+ IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache) throws HyracksDataException {
rtreePolicy.split(this, buf, rightFrame, slotManager, frameTuple, tuple, splitKey);
}
@@ -166,15 +165,12 @@
@Override
public String toString() {
- return new StringBuilder(this.getClass().getSimpleName()).append('\n').append(
- "Tuple Count: " + getTupleCount()).append('\n').append("Free Space offset: " + buf
- .getInt(Constants.FREE_SPACE_OFFSET)).append('\n').append("Level: " + buf
- .get(Constants.LEVEL_OFFSET)).append('\n').append("LSN: "
- + buf.getLong(PAGE_LSN_OFFSET)).append('\n').append(
- "Total Free Space: " + buf.getInt(TOTAL_FREE_SPACE_OFFSET)).append(
- '\n').append("Flag: " + buf.get(
- FLAG_OFFSET)).append('\n')
- .append("NSN: " + buf.getLong(PAGE_NSN_OFFSET)).append('\n').append("Right Page:")
- .append(buf.getInt(RIGHT_PAGE_OFFSET)).toString();
+ return new StringBuilder(this.getClass().getSimpleName()).append('\n').append("Tuple Count: " + getTupleCount())
+ .append('\n').append("Free Space offset: " + buf.getInt(Constants.FREE_SPACE_OFFSET)).append('\n')
+ .append("Level: " + buf.get(Constants.LEVEL_OFFSET)).append('\n')
+ .append("LSN: " + buf.getLong(PAGE_LSN_OFFSET)).append('\n')
+ .append("Total Free Space: " + buf.getInt(TOTAL_FREE_SPACE_OFFSET)).append('\n')
+ .append("Flag: " + buf.get(FLAG_OFFSET)).append('\n').append("NSN: " + buf.getLong(PAGE_NSN_OFFSET))
+ .append('\n').append("Right Page:").append(buf.getInt(RIGHT_PAGE_OFFSET)).toString();
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
index b8b5a8c..5550e1f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
@@ -38,8 +38,8 @@
for (int i = 0; i < keyValueProviders.length; i++) {
keyValueProviders[i] = keyValueProviderFactories[i].createPrimitiveValueProvider();
}
- return new RTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders,
- rtreePolicyType, isPointMBR);
+ return new RTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders, rtreePolicyType,
+ isPointMBR);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
index e112b86..16b22c0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
@@ -38,8 +38,8 @@
for (int i = 0; i < keyValueProviders.length; i++) {
keyValueProviders[i] = keyValueProviderFactories[i].createPrimitiveValueProvider();
}
- return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders,
- rtreePolicyType, isPointMBR);
+ return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders, rtreePolicyType,
+ isPointMBR);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicy.java
index 0dea4c2..623ef21 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicy.java
@@ -62,10 +62,12 @@
@Override
public void split(ITreeIndexFrame leftFrame, ByteBuffer buf, ITreeIndexFrame rightFrame, ISlotManager slotManager,
- ITreeIndexTupleReference frameTuple, ITupleReference tuple, ISplitKey splitKey) throws HyracksDataException {
+ ITreeIndexTupleReference frameTuple, ITupleReference tuple, ISplitKey splitKey)
+ throws HyracksDataException {
RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
- RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
+ RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame =
+ ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
RTreeNSMFrame leftRTreeFrame = ((RTreeNSMFrame) leftFrame);
@@ -75,10 +77,10 @@
for (int i = 0; i < maxFieldPos; i++) {
int j = maxFieldPos + i;
frameTuple.resetByTupleIndex(leftRTreeFrame, 0);
- double leastLowerValue = keyValueProviders[i].getValue(frameTuple.getFieldData(i),
- frameTuple.getFieldStart(i));
- double greatestUpperValue = keyValueProviders[j].getValue(frameTuple.getFieldData(j),
- frameTuple.getFieldStart(j));
+ double leastLowerValue =
+ keyValueProviders[i].getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
+ double greatestUpperValue =
+ keyValueProviders[j].getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
double leastUpperValue = leastLowerValue;
double greatestLowerValue = greatestUpperValue;
int leastUpperIndex = 0;
@@ -88,16 +90,16 @@
int tupleCount = leftRTreeFrame.getTupleCount();
for (int k = 1; k < tupleCount; ++k) {
frameTuple.resetByTupleIndex(leftRTreeFrame, k);
- double lowerValue = keyValueProviders[i].getValue(frameTuple.getFieldData(i),
- frameTuple.getFieldStart(i));
+ double lowerValue =
+ keyValueProviders[i].getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
if (lowerValue > greatestLowerValue) {
greatestLowerIndex = k;
cmpFrameTuple.resetByTupleIndex(leftRTreeFrame, k);
greatestLowerValue = keyValueProviders[i].getValue(cmpFrameTuple.getFieldData(i),
cmpFrameTuple.getFieldStart(i));
}
- double higherValue = keyValueProviders[j].getValue(frameTuple.getFieldData(j),
- frameTuple.getFieldStart(j));
+ double higherValue =
+ keyValueProviders[j].getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
if (higherValue < leastUpperValue) {
leastUpperIndex = k;
cmpFrameTuple.resetByTupleIndex(leftRTreeFrame, k);
@@ -169,8 +171,8 @@
((UnorderedSlotManager) slotManager).deleteEmptySlots();
// maintain space information
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
- + (slotManager.getSlotSize() * numOfDeletedTuples));
+ buf.putInt(totalFreeSpaceOff,
+ buf.getInt(totalFreeSpaceOff) + totalBytes + (slotManager.getSlotSize() * numOfDeletedTuples));
// compact both pages
rightFrame.compact();
@@ -196,7 +198,8 @@
splitKey.initData(splitKeySize);
leftRTreeFrame.adjustMBR();
- rTreeTupleWriterLeftFrame.writeTupleFields(leftRTreeFrame.getMBRTuples(), 0, rTreeSplitKey.getLeftPageBuffer(), 0);
+ rTreeTupleWriterLeftFrame.writeTupleFields(leftRTreeFrame.getMBRTuples(), 0, rTreeSplitKey.getLeftPageBuffer(),
+ 0);
rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer().array(), 0);
((IRTreeFrame) rightFrame).adjustMBR();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicyType.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicyType.java
index 8ca9842..d9dbd81 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicyType.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicyType.java
@@ -20,5 +20,6 @@
package org.apache.hyracks.storage.am.rtree.frames;
public enum RTreePolicyType {
- RTREE, RSTARTREE
+ RTREE,
+ RSTARTREE
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/EntriesOrder.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/EntriesOrder.java
index d6d69bb..8798241 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/EntriesOrder.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/EntriesOrder.java
@@ -20,5 +20,6 @@
package org.apache.hyracks.storage.am.rtree.impls;
public enum EntriesOrder {
- ASCENDING, DESCENDING
+ ASCENDING,
+ DESCENDING
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTreeSplitKey.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
index f6bdcfb..11a5b2b 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
@@ -35,8 +35,7 @@
public int keySize = 0;
- public RTreeSplitKey(ITreeIndexTupleReference leftTuple,
- ITreeIndexTupleReference rightTuple) {
+ public RTreeSplitKey(ITreeIndexTupleReference leftTuple, ITreeIndexTupleReference rightTuple) {
this.leftTuple = leftTuple;
this.rightTuple = rightTuple;
}
@@ -114,8 +113,7 @@
rightPageBuf.putInt(keySize, page);
}
- public ISplitKey duplicate(ITreeIndexTupleReference copyLeftTuple,
- ITreeIndexTupleReference copyRightTuple) {
+ public ISplitKey duplicate(ITreeIndexTupleReference copyLeftTuple, ITreeIndexTupleReference copyRightTuple) {
RTreeSplitKey copy = new RTreeSplitKey(copyLeftTuple, copyRightTuple);
copy.leftPageData = leftPageData.clone();
copy.leftPageBuf = ByteBuffer.wrap(copy.leftPageData);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/Rectangle.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/Rectangle.java
index cbfd245..c74f712 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/Rectangle.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/Rectangle.java
@@ -64,13 +64,13 @@
public void enlarge(ITupleReference tupleToBeInserted, IPrimitiveValueProvider[] valueProviders) {
for (int i = 0; i < getDim(); i++) {
int j = getDim() + i;
- double low = valueProviders[i].getValue(tupleToBeInserted.getFieldData(i),
- tupleToBeInserted.getFieldStart(i));
+ double low =
+ valueProviders[i].getValue(tupleToBeInserted.getFieldData(i), tupleToBeInserted.getFieldStart(i));
if (getLow(i) > low) {
setLow(i, low);
}
- double high = valueProviders[j].getValue(tupleToBeInserted.getFieldData(j),
- tupleToBeInserted.getFieldStart(j));
+ double high =
+ valueProviders[j].getValue(tupleToBeInserted.getFieldData(j), tupleToBeInserted.getFieldStart(j));
if (getHigh(i) < high) {
setHigh(i, high);
}
@@ -84,8 +84,8 @@
for (int i = 0; i < getDim(); i++) {
int j = getDim() + i;
- double low = valueProviders[i].getValue(tupleToBeInserted.getFieldData(i),
- tupleToBeInserted.getFieldStart(i));
+ double low =
+ valueProviders[i].getValue(tupleToBeInserted.getFieldData(i), tupleToBeInserted.getFieldStart(i));
double lowAfterEnlargement;
if (getLow(i) > low) {
lowAfterEnlargement = low;
@@ -93,8 +93,8 @@
lowAfterEnlargement = getLow(i);
}
- double high = valueProviders[j].getValue(tupleToBeInserted.getFieldData(j),
- tupleToBeInserted.getFieldStart(j));
+ double high =
+ valueProviders[j].getValue(tupleToBeInserted.getFieldData(j), tupleToBeInserted.getFieldStart(j));
double highAfterEnlargement;
if (getHigh(i) < high) {
highAfterEnlargement = high;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/HilbertDoubleComparator.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/HilbertDoubleComparator.java
index 3194674..447940f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/HilbertDoubleComparator.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/HilbertDoubleComparator.java
@@ -54,8 +54,8 @@
private IntArrayList stateStack = new IntArrayList(1000, 200);
private DoubleArrayList boundsStack = new DoubleArrayList(2000, 400);
- private IPrimitiveValueProvider valueProvider = DoublePrimitiveValueProviderFactory.INSTANCE
- .createPrimitiveValueProvider();
+ private IPrimitiveValueProvider valueProvider =
+ DoublePrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
private double[] a;
private double[] b;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparator.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparator.java
index 30eb991..13aed8c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparator.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparator.java
@@ -36,8 +36,8 @@
private double stepsize;
private DoubleArrayList boundsStack = new DoubleArrayList(2000, 400);
- private IPrimitiveValueProvider valueProvider = DoublePrimitiveValueProviderFactory.INSTANCE
- .createPrimitiveValueProvider();
+ private IPrimitiveValueProvider valueProvider =
+ DoublePrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
private double[] a;
private double[] b;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/AsyncFIFOPageQueueManager.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/AsyncFIFOPageQueueManager.java
index b4f364c..dbead1e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/AsyncFIFOPageQueueManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/AsyncFIFOPageQueueManager.java
@@ -34,7 +34,7 @@
protected BufferCache bufferCache;
volatile protected PageQueue pageQueue;
- public AsyncFIFOPageQueueManager(BufferCache bufferCache){
+ public AsyncFIFOPageQueueManager(BufferCache bufferCache) {
this.bufferCache = bufferCache;
}
@@ -43,7 +43,8 @@
public final IFIFOPageWriter writer;
protected PageQueue(IBufferCache bufferCache, IFIFOPageWriter writer) {
- if(DEBUG) System.out.println("[FIFO] New Queue");
+ if (DEBUG)
+ System.out.println("[FIFO] New Queue");
this.bufferCache = bufferCache;
this.writer = writer;
}
@@ -59,10 +60,9 @@
@Override
public void put(ICachedPage page) throws HyracksDataException {
try {
- if(!poisoned.get()) {
+ if (!poisoned.get()) {
queue.put(page);
- }
- else{
+ } else {
throw new HyracksDataException("Queue is closing");
}
} catch (InterruptedException e) {
@@ -72,22 +72,21 @@
}
}
-
public PageQueue createQueue(IFIFOPageWriter writer) {
if (pageQueue == null) {
- synchronized(this){
+ synchronized (this) {
if (pageQueue == null) {
writerThread = new Thread(this);
writerThread.setName("FIFO Writer Thread");
writerThread.start();
- pageQueue = new PageQueue(bufferCache,writer);
+ pageQueue = new PageQueue(bufferCache, writer);
}
}
}
return pageQueue;
}
- public void destroyQueue(){
+ public void destroyQueue() {
poisoned.set(true);
if (writerThread == null) {
synchronized (this) {
@@ -99,16 +98,16 @@
//Dummy cached page to act as poison pill
CachedPage poisonPill = new CachedPage();
- poisonPill.setQueueInfo(new QueueInfo(true,true));
+ poisonPill.setQueueInfo(new QueueInfo(true, true));
- try{
+ try {
synchronized (poisonPill) {
queue.put(poisonPill);
- while(queue.contains(poisonPill)){
+ while (queue.contains(poisonPill)) {
poisonPill.wait();
}
}
- } catch (InterruptedException e){
+ } catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
@@ -139,26 +138,30 @@
@Override
public void run() {
- if (DEBUG) System.out.println("[FIFO] Writer started");
+ if (DEBUG)
+ System.out.println("[FIFO] Writer started");
boolean die = false;
while (!die) {
ICachedPage entry;
try {
entry = queue.take();
- } catch(InterruptedException e) {
+ } catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
}
- if (entry.getQueueInfo() != null && entry.getQueueInfo().hasWaiters()){
- synchronized(entry) {
- if(entry.getQueueInfo().isPoison()) { die = true; }
+ if (entry.getQueueInfo() != null && entry.getQueueInfo().hasWaiters()) {
+ synchronized (entry) {
+ if (entry.getQueueInfo().isPoison()) {
+ die = true;
+ }
entry.notifyAll();
continue;
}
}
- if (DEBUG) System.out.println("[FIFO] Write " + BufferedFileHandle.getFileId(((CachedPage)entry).dpid)+","
- + BufferedFileHandle.getPageId(((CachedPage)entry).dpid));
+ if (DEBUG)
+ System.out.println("[FIFO] Write " + BufferedFileHandle.getFileId(((CachedPage) entry).dpid) + ","
+ + BufferedFileHandle.getPageId(((CachedPage) entry).dpid));
try {
pageQueue.getWriter().write(entry, bufferCache);
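One more editorial aside (not in the patch): createQueue above is a double-checked-locking lazy initializer over the volatile pageQueue field, and the hunk only normalizes its whitespace. The idiom in generic form, with hypothetical names:

    class LazyHolder {
        // volatile is what makes the unsynchronized first read safe (Java 5+ memory model)
        private volatile Object instance;

        Object get() {
            if (instance == null) {            // fast path: no lock once initialized
                synchronized (this) {
                    if (instance == null) {    // re-check under the lock
                        instance = new Object();
                    }
                }
            }
            return instance;
        }
    }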
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
index 302c7b2..1443bbc 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
@@ -1307,21 +1307,17 @@
finishQueue();
if (cycleCount > MAX_PIN_ATTEMPT_CYCLES) {
cycleCount = 0; // suppress warning below
- throw new HyracksDataException(
- "Unable to find free page in buffer cache after " + MAX_PIN_ATTEMPT_CYCLES
- + " cycles (buffer cache undersized?)" + (DEBUG
- ? " ; " + (masterPinCount.get() - startingPinCount)
- + " successful pins since start of cycle"
- : ""));
+ throw new HyracksDataException("Unable to find free page in buffer cache after "
+ + MAX_PIN_ATTEMPT_CYCLES + " cycles (buffer cache undersized?)"
+ + (DEBUG ? " ; " + (masterPinCount.get() - startingPinCount)
+ + " successful pins since start of cycle" : ""));
}
}
} finally {
if (cycleCount > PIN_ATTEMPT_CYCLES_WARNING_THRESHOLD && LOGGER.isWarnEnabled()) {
LOGGER.warn("Took " + cycleCount + " cycles to find free page in buffer cache. (buffer cache "
- + "undersized?)" + (DEBUG
- ? " ; " + (masterPinCount.get() - startingPinCount)
- + " successful pins since start of cycle"
- : ""));
+ + "undersized?)" + (DEBUG ? " ; " + (masterPinCount.get() - startingPinCount)
+ + " successful pins since start of cycle" : ""));
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
index a6a3bc8..87a15d3 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
@@ -170,7 +170,7 @@
return;
}
final int newSize = pageSize * multiplier;
- ByteBuffer oldBuffer = ((CachedPage)cPage).buffer;
+ ByteBuffer oldBuffer = ((CachedPage) cPage).buffer;
oldBuffer.position(0);
final int delta = multiplier - origMultiplier;
if (multiplier < origMultiplier) {
@@ -194,8 +194,7 @@
}
@Override
- public void fixupCapacityOnLargeRead(ICachedPageInternal cPage)
- throws HyracksDataException {
+ public void fixupCapacityOnLargeRead(ICachedPageInternal cPage) throws HyracksDataException {
ByteBuffer oldBuffer = ((CachedPage) cPage).buffer;
final int multiplier = cPage.getFrameSizeMultiplier();
final int newSize = pageSize * multiplier;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IFIFOPageWriter.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IFIFOPageWriter.java
index 7380261..567c01e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IFIFOPageWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IFIFOPageWriter.java
@@ -17,7 +17,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
public interface IFIFOPageWriter {
public void write(ICachedPage page, BufferCache bufferCache) throws HyracksDataException;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IPageReplacementStrategy.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IPageReplacementStrategy.java
index d3bcce5..bbf3b45 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IPageReplacementStrategy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IPageReplacementStrategy.java
@@ -39,8 +39,7 @@
public int getNumPages();
- void fixupCapacityOnLargeRead(ICachedPageInternal cPage)
- throws HyracksDataException;
+ void fixupCapacityOnLargeRead(ICachedPageInternal cPage) throws HyracksDataException;
public int getPageSize();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/QueueInfo.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/QueueInfo.java
index bc69bc8..d86319d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/QueueInfo.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/QueueInfo.java
@@ -18,24 +18,24 @@
*/
package org.apache.hyracks.storage.common.buffercache;
-public class QueueInfo implements IQueueInfo{
+public class QueueInfo implements IQueueInfo {
private final boolean poison;
private final boolean waiters;
- public QueueInfo(boolean waiters, boolean poison){
+ public QueueInfo(boolean waiters, boolean poison) {
this.waiters = waiters;
this.poison = poison;
}
@Override
- public boolean hasWaiters(){
+ public boolean hasWaiters() {
return waiters;
}
@Override
- public boolean isPoison(){
+ public boolean isPoison() {
return poison;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java
index cd6ea2e..9e863eb 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java
@@ -39,8 +39,8 @@
@Override
protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
- ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
- throws Exception {
+ ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey,
+ ITupleReference prefixHighKey) throws Exception {
OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType, false);
ctx.getIndex().create();
ctx.getIndex().activate();
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexDeleteTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexDeleteTest.java
index 7378cf1..b7cf4a0 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexDeleteTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexDeleteTest.java
@@ -54,8 +54,8 @@
} else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, false, getRandom());
}
- int numTuplesPerDeleteRound = (int) Math
- .ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
+ int numTuplesPerDeleteRound =
+ (int) Math.ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
for (int j = 0; j < numDeleteRounds; j++) {
orderedIndexTestUtils.deleteTuples(ctx, numTuplesPerDeleteRound, getRandom());
orderedIndexTestUtils.checkPointSearches(ctx);
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexSortedInsertTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexSortedInsertTest.java
index 9a08401..8aa3f14 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexSortedInsertTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexSortedInsertTest.java
@@ -45,8 +45,8 @@
@Override
protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
- ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
- throws Exception {
+ ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey,
+ ITupleReference prefixHighKey) throws Exception {
OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType, false);
ctx.getIndex().create();
ctx.getIndex().activate();
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java
index 3dac0db..c43d41f 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java
@@ -62,8 +62,8 @@
LOGGER.info("BTree " + getTestOpName() + " Test With One Int Key And Value.");
}
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
// Range search in [-1000, 1000]
ITupleReference lowKey = TupleUtils.createIntegerTuple(-1000);
ITupleReference highKey = TupleUtils.createIntegerTuple(1000);
@@ -79,8 +79,8 @@
LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys.");
}
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
// Range search in [50 0, 50 500]
ITupleReference lowKey = TupleUtils.createIntegerTuple(50, 0);
@@ -101,9 +101,9 @@
LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys And Values.");
}
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
// Range search in [50 100, 100 100]
ITupleReference lowKey = TupleUtils.createIntegerTuple(-100, -100);
@@ -124,8 +124,8 @@
LOGGER.info("BTree " + getTestOpName() + " Test With One String Key And Value.");
}
- ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() };
+ ISerializerDeserializer[] fieldSerdes =
+ { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
// Range search in ["cbf", cc7"]
ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
@@ -142,8 +142,8 @@
LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys.");
}
- ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() };
+ ISerializerDeserializer[] fieldSerdes =
+ { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
// Range search in ["cbf", "ddd", cc7", "eee"]
ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
@@ -164,9 +164,9 @@
LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys And Values.");
}
- ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() };
+ ISerializerDeserializer[] fieldSerdes =
+ { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
// Range search in ["cbf", "ddd", cc7", "eee"]
ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
index 4a9e0ed..bf3c8e5 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
@@ -58,8 +58,8 @@
private static void compareActualAndExpected(ITupleReference actual, CheckTuple expected,
ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
for (int i = 0; i < fieldSerdes.length; i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(actual.getFieldData(i), actual.getFieldStart(i),
- actual.getFieldLength(i));
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(actual.getFieldData(i), actual.getFieldStart(i), actual.getFieldLength(i));
DataInput dataIn = new DataInputStream(inStream);
Object actualObj = fieldSerdes[i].deserialize(dataIn);
if (!actualObj.equals(expected.getField(i))) {
@@ -99,20 +99,20 @@
MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), lowKey);
MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), highKey);
IIndexCursor searchCursor = ctx.getIndexAccessor().createSearchCursor(false);
- RangePredicate rangePred = new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp,
- highKeyCmp);
+ RangePredicate rangePred =
+ new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp, highKeyCmp);
ctx.getIndexAccessor().search(searchCursor, rangePred);
// Get the subset of elements from the expected set within given key
// range.
CheckTuple lowKeyCheck = createCheckTupleFromTuple(lowKey, ctx.getFieldSerdes(), lowKeyCmp.getKeyFieldCount());
- CheckTuple highKeyCheck = createCheckTupleFromTuple(highKey, ctx.getFieldSerdes(),
- highKeyCmp.getKeyFieldCount());
+ CheckTuple highKeyCheck =
+ createCheckTupleFromTuple(highKey, ctx.getFieldSerdes(), highKeyCmp.getKeyFieldCount());
SortedSet<CheckTuple> expectedSubset = null;
if (lowKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()
|| highKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()) {
// Searching on a key prefix (low key or high key or both).
- expectedSubset = getPrefixExpectedSubset((TreeSet<CheckTuple>) ctx.getCheckTuples(), lowKeyCheck,
- highKeyCheck);
+ expectedSubset =
+ getPrefixExpectedSubset((TreeSet<CheckTuple>) ctx.getCheckTuples(), lowKeyCheck, highKeyCheck);
} else {
// Searching on all key fields.
expectedSubset = ((TreeSet<CheckTuple>) ctx.getCheckTuples()).subSet(lowKeyCheck, lowKeyInclusive,
@@ -246,8 +246,8 @@
throws HyracksDataException {
int fieldCount = ctx.getFieldCount();
int numTuples = checkTuples.size();
- ArrayTupleBuilder tupleBuilder = filtered ? new ArrayTupleBuilder(fieldCount + 1)
- : new ArrayTupleBuilder(fieldCount);
+ ArrayTupleBuilder tupleBuilder =
+ filtered ? new ArrayTupleBuilder(fieldCount + 1) : new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
int c = 1;
@@ -298,7 +298,7 @@
// because we ignore duplicate keys.
ctx.insertCheckTuple(createStringCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
if (filtered) {
- addFilterField(ctx,minMax);
+ addFilterField(ctx, minMax);
}
} catch (HyracksDataException e) {
// Ignore duplicate key insertions.
@@ -476,8 +476,8 @@
}
@Override
- public void checkExpectedResults(IIndexCursor cursor, Collection checkTuples,
- ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
+ public void checkExpectedResults(IIndexCursor cursor, Collection checkTuples, ISerializerDeserializer[] fieldSerdes,
+ int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
int actualCount = 0;
try {
while (cursor.hasNext()) {
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexUpsertTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexUpsertTest.java
index 2118f8c..3417066 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexUpsertTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexUpsertTest.java
@@ -45,8 +45,8 @@
@Override
protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
- ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
- throws Exception {
+ ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey,
+ ITupleReference prefixHighKey) throws Exception {
OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType, false);
ctx.getIndex().create();
ctx.getIndex().activate();
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/CheckTuple.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/CheckTuple.java
index 8a6996b..31c55b3 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/CheckTuple.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/CheckTuple.java
@@ -19,7 +19,7 @@
package org.apache.hyracks.storage.am.common;
-@SuppressWarnings({"rawtypes", "unchecked"})
+@SuppressWarnings({ "rawtypes", "unchecked" })
public class CheckTuple<T extends Comparable<T>> implements Comparable<T> {
protected final int numKeys;
protected final Comparable[] fields;
@@ -101,7 +101,7 @@
StringBuilder strBuilder = new StringBuilder();
for (int i = 0; i < fields.length; i++) {
strBuilder.append(fields[i].toString());
- if (i != fields.length-1) {
+ if (i != fields.length - 1) {
strBuilder.append(" ");
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexTestContext.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexTestContext.java
index 6ac02e1..2c08ba0 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexTestContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexTestContext.java
@@ -44,8 +44,8 @@
IndexAccessParameters actx =
new IndexAccessParameters(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
this.indexAccessor = index.createAccessor(actx);
- this.tupleBuilder = filtered ? new ArrayTupleBuilder(fieldSerdes.length + 1)
- : new ArrayTupleBuilder(fieldSerdes.length);
+ this.tupleBuilder =
+ filtered ? new ArrayTupleBuilder(fieldSerdes.length + 1) : new ArrayTupleBuilder(fieldSerdes.length);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationCallback.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationCallback.java
index 60d88e5..d796ece 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationCallback.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationCallback.java
@@ -25,7 +25,7 @@
import org.apache.hyracks.storage.common.IModificationOperationCallback;
import org.apache.hyracks.storage.common.ISearchOperationCallback;
-public enum TestOperationCallback implements ISearchOperationCallback,IModificationOperationCallback {
+public enum TestOperationCallback implements ISearchOperationCallback, IModificationOperationCallback {
INSTANCE;
private static final int RANDOM_SEED = 50;
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationSelector.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationSelector.java
index f804f89..e4c4332 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationSelector.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationSelector.java
@@ -21,7 +21,6 @@
import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;
-
public class TestOperationSelector {
public static enum TestOperation {
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
index 43258dd..f0b01a2 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
@@ -108,8 +108,8 @@
CheckTuple checkTuple = createCheckTuple(fieldSerdes.length, numKeys);
int fieldCount = Math.min(fieldSerdes.length, tuple.getFieldCount());
for (int i = 0; i < fieldCount; i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
DataInput dataIn = new DataInputStream(inStream);
Comparable fieldObj = (Comparable) fieldSerdes[i].deserialize(dataIn);
checkTuple.appendField(fieldObj);
@@ -142,8 +142,8 @@
while (diskOrderCursor.hasNext()) {
diskOrderCursor.next();
ITupleReference tuple = diskOrderCursor.getTuple();
- CheckTuple checkTuple = createCheckTupleFromTuple(tuple, ctx.getFieldSerdes(),
- ctx.getKeyFieldCount());
+ CheckTuple checkTuple =
+ createCheckTupleFromTuple(tuple, ctx.getFieldSerdes(), ctx.getKeyFieldCount());
if (!checkDiskOrderScanResult(tuple, checkTuple, ctx)) {
fail("Disk-order scan returned unexpected answer: " + checkTuple.toString());
}
@@ -315,8 +315,8 @@
throws HyracksDataException {
int fieldCount = ctx.getFieldCount();
int numTuples = checkTuples.size();
- ArrayTupleBuilder tupleBuilder = filtered ? new ArrayTupleBuilder(fieldCount + 1)
- : new ArrayTupleBuilder(fieldCount);
+ ArrayTupleBuilder tupleBuilder =
+ filtered ? new ArrayTupleBuilder(fieldCount + 1) : new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
// Perform bulk load.
IIndexBulkLoader bulkLoader = ctx.getIndex().createBulkLoader(0.7f, false, numTuples, false);
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/IFieldValueGenerator.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/IFieldValueGenerator.java
index dbd4bfc..986eccc 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/IFieldValueGenerator.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/IFieldValueGenerator.java
@@ -21,5 +21,6 @@
public interface IFieldValueGenerator<T> {
public T next();
+
public void reset();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/PersonNameFieldValueGenerator.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/PersonNameFieldValueGenerator.java
index 256eaf5..da7f52c 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/PersonNameFieldValueGenerator.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/PersonNameFieldValueGenerator.java
@@ -40,8 +40,7 @@
private List<String> firstNames = new ArrayList<>();
private List<String> lastNames = new ArrayList<>();
- public PersonNameFieldValueGenerator(Random rnd, double middleInitialProb)
- throws IOException {
+ public PersonNameFieldValueGenerator(Random rnd, double middleInitialProb) throws IOException {
this.rnd = rnd;
this.middleInitialProb = middleInitialProb;
initNames();
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleBatch.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleBatch.java
index c34c7bc..84cda9b 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleBatch.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleBatch.java
@@ -31,7 +31,8 @@
private final TupleGenerator[] tupleGens;
public final AtomicBoolean inUse = new AtomicBoolean(false);
- public TupleBatch(int size, IFieldValueGenerator[] fieldGens, ISerializerDeserializer[] fieldSerdes, int payloadSize) {
+ public TupleBatch(int size, IFieldValueGenerator[] fieldGens, ISerializerDeserializer[] fieldSerdes,
+ int payloadSize) {
this.size = size;
tupleGens = new TupleGenerator[size];
for (int i = 0; i < size; i++) {
@@ -40,7 +41,7 @@
}
public void generate() throws IOException {
- for(TupleGenerator tupleGen : tupleGens) {
+ for (TupleGenerator tupleGen : tupleGens) {
tupleGen.next();
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleGenerator.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleGenerator.java
index 4f26065..eb76e67 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleGenerator.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleGenerator.java
@@ -27,7 +27,7 @@
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-@SuppressWarnings({"rawtypes", "unchecked" })
+@SuppressWarnings({ "rawtypes", "unchecked" })
public class TupleGenerator {
protected final ISerializerDeserializer[] fieldSerdes;
protected final IFieldValueGenerator[] fieldGens;
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java
index ce486fd..2d3289c 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java
@@ -56,8 +56,8 @@
} else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
rTreeTestUtils.insertDoubleTuples(ctx, numTuplesToInsert, getRandom());
}
- int numTuplesPerDeleteRound = (int) Math
- .ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
+ int numTuplesPerDeleteRound =
+ (int) Math.ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
for (int j = 0; j < numDeleteRounds; j++) {
rTreeTestUtils.deleteTuples(ctx, numTuplesPerDeleteRound, getRandom());
rTreeTestUtils.checkScan(ctx);
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
index 1f71889..9064225 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
@@ -71,8 +71,8 @@
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
int numKeys = 4;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(numKeys, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, IntegerPointable.FACTORY);
// Range search, the rectangle bottom left coordinates are -1000, -1000
// and the top right coordinates are 1000, 1000
ITupleReference key = TupleUtils.createIntegerTuple(-1000, -1000, 1000, 1000);
@@ -92,8 +92,8 @@
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
int numKeys = 4;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
// Range search, the rectangle bottom left coordinates are -1000.0,
// -1000.0 and the top right coordinates are 1000.0, 1000.0
ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, 1000.0, 1000.0);
@@ -115,13 +115,13 @@
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
int numKeys = 8;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
// Range search, the rectangle bottom left coordinates are -1000.0,
// -1000.0, -1000.0, -1000.0 and the top right coordinates are 1000.0,
// 1000.0, 1000.0, 1000.0
- ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0,
- 1000.0);
+ ITupleReference key =
+ TupleUtils.createDoubleTuple(-1000.0, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0, 1000.0);
runTest(fieldSerdes, valueProviderFactories, numKeys, key, RTreePolicyType.RTREE);
}
@@ -143,8 +143,8 @@
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
int numKeys = 4;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(numKeys, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, IntegerPointable.FACTORY);
// Range search, the rectangle bottom left coordinates are -1000, -1000
// and the top right coordinates are 1000, 1000
ITupleReference key = TupleUtils.createIntegerTuple(-1000, -1000, 1000, 1000);
@@ -170,8 +170,8 @@
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
int numKeys = 4;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
// Range search, the rectangle bottom left coordinates are -1000.0,
// -1000.0 and the top right coordinates are 1000.0, 1000.0
ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, 1000.0, 1000.0);
@@ -199,13 +199,13 @@
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
int numKeys = 8;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
// Range search, the rectangle bottom left coordinates are -1000.0,
// -1000.0, -1000.0, -1000.0 and the top right coordinates are 1000.0,
// 1000.0, 1000.0, 1000.0
- ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0,
- 1000.0);
+ ITupleReference key =
+ TupleUtils.createDoubleTuple(-1000.0, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0, 1000.0);
runTest(fieldSerdes, valueProviderFactories, numKeys, key, RTreePolicyType.RSTARTREE);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
index 300a1ff..f48da3a1 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
@@ -173,8 +173,8 @@
}
@Override
- public void checkExpectedResults(IIndexCursor cursor, Collection checkTuples,
- ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
+ public void checkExpectedResults(IIndexCursor cursor, Collection checkTuples, ISerializerDeserializer[] fieldSerdes,
+ int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
int actualCount = 0;
try {
while (cursor.hasNext()) {
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreePageSizePerf.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreePageSizePerf.java
index 2d355da..95c308a 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreePageSizePerf.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreePageSizePerf.java
@@ -35,7 +35,7 @@
public static void main(String[] args) throws Exception {
// Disable logging so we can better see the output times.
Enumeration<String> loggers = LogManager.getLogManager().getLoggerNames();
- while(loggers.hasMoreElements()) {
+ while (loggers.hasMoreElements()) {
String loggerName = loggers.nextElement();
Logger logger = LogManager.getLogManager().getLogger(loggerName);
logger.setLevel(Level.OFF);
@@ -45,10 +45,12 @@
int batchSize = 10000;
int numBatches = numTuples / batchSize;
- ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes, 30);
- IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, fieldSerdes.length);
+ IBinaryComparatorFactory[] cmpFactories =
+ SerdeUtils.serdesToComparatorFactories(fieldSerdes, fieldSerdes.length);
runExperiment(numBatches, batchSize, 1024, 100000, fieldSerdes, cmpFactories, typeTraits);
runExperiment(numBatches, batchSize, 2048, 100000, fieldSerdes, cmpFactories, typeTraits);
@@ -61,7 +63,9 @@
runExperiment(numBatches, batchSize, 262144, 391, fieldSerdes, cmpFactories, typeTraits);
}
- private static void runExperiment(int numBatches, int batchSize, int pageSize, int numPages, ISerializerDeserializer[] fieldSerdes, IBinaryComparatorFactory[] cmpFactories, ITypeTraits[] typeTraits) throws Exception {
+ private static void runExperiment(int numBatches, int batchSize, int pageSize, int numPages,
+ ISerializerDeserializer[] fieldSerdes, IBinaryComparatorFactory[] cmpFactories, ITypeTraits[] typeTraits)
+ throws Exception {
System.out.println("PAGE SIZE: " + pageSize);
System.out.println("NUM PAGES: " + numPages);
System.out.println("MEMORY: " + (pageSize * numPages));
@@ -72,7 +76,8 @@
runner.init();
int numThreads = 1;
for (int i = 0; i < repeats; i++) {
- DataGenThread dataGen = new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, 30, 50, 10, false);
+ DataGenThread dataGen =
+ new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, 30, 50, 10, false);
dataGen.start();
times[i] = runner.runExperiment(dataGen, numThreads);
System.out.println("TIME " + i + ": " + times[i] + "ms");
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/PerfExperiment.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/PerfExperiment.java
index 3f14f62..16d1208 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/PerfExperiment.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/PerfExperiment.java
@@ -41,7 +41,7 @@
logger.setLevel(Level.OFF);
}
boolean sorted = Boolean.parseBoolean(args[0]);
- int numThreads = Integer.parseInt(args[1]);
+ int numThreads = Integer.parseInt(args[1]);
//int numTuples = 100000; // 100K
//int numTuples = 1000000; // 1M
@@ -58,11 +58,12 @@
int numBatches = numTuples / batchSize;
int payLoadSize = 240;
- ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes, payLoadSize);
- IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes,
- fieldSerdes.length);
+ IBinaryComparatorFactory[] cmpFactories =
+ SerdeUtils.serdesToComparatorFactories(fieldSerdes, fieldSerdes.length);
int[] bloomFilterKeyFields = new int[cmpFactories.length];
for (int i = 0; i < bloomFilterKeyFields.length; i++) {
bloomFilterKeyFields[i] = i;
@@ -73,8 +74,8 @@
int repeats = 1;
long[] times = new long[repeats];
-// int numThreads = 4;
-// boolean sorted = true;
+ // int numThreads = 4;
+ // boolean sorted = true;
for (int i = 0; i < repeats; i++) {
//ConcurrentSkipListRunner runner = new ConcurrentSkipListRunner(numBatches, batchSize, tupleSize, typeTraits, cmp);
//InMemoryBTreeRunner runner = new InMemoryBTreeRunner(numBatches, 8192, 100000, typeTraits, cmpFactories);
@@ -90,7 +91,8 @@
int onDiskNumPages = 16384; // 2GB
LSMTreeRunner runner = new LSMTreeRunner(numBatches, inMemPageSize, inMemNumPages, onDiskPageSize,
onDiskNumPages, typeTraits, cmpFactories, bloomFilterKeyFields, bloomFilterFalsePositiveRate);
- DataGenThread dataGen = new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, payLoadSize, 50, 10, sorted);
+ DataGenThread dataGen =
+ new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, payLoadSize, 50, 10, sorted);
dataGen.start();
runner.reset();
times[i] = runner.runExperiment(dataGen, numThreads);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTuplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTuplesTest.java
index 4c51520..c0d9bb8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTuplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTuplesTest.java
@@ -70,10 +70,11 @@
// Create and write tuple to bytes using an LSMBTreeTupleWriter.
LSMBTreeTupleWriter maxMatterTupleWriter =
new LSMBTreeTupleWriter(maxTypeTraits, numKeyFields, false, false);
- ITupleReference maxTuple = TupleUtils.createTuple(maxFieldSerdes, (Object[])maxFields);
+ ITupleReference maxTuple = TupleUtils.createTuple(maxFieldSerdes, (Object[]) maxFields);
ByteBuffer maxMatterBuf = writeTuple(maxTuple, maxMatterTupleWriter);
// Tuple reference should work for both matter and antimatter tuples (doesn't matter which factory creates it).
- LSMBTreeTupleReference maxLsmBTreeTuple = (LSMBTreeTupleReference) maxMatterTupleWriter.createTupleReference();
+ LSMBTreeTupleReference maxLsmBTreeTuple =
+ (LSMBTreeTupleReference) maxMatterTupleWriter.createTupleReference();
ISerializerDeserializer[] fieldSerdes = Arrays.copyOfRange(maxFieldSerdes, 0, numFields);
ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
@@ -84,7 +85,7 @@
fields[j] = fieldGens[j].next();
}
// Create and write tuple to bytes using an LSMBTreeTupleWriter.
- ITupleReference tuple = TupleUtils.createTuple(fieldSerdes, (Object[])fields);
+ ITupleReference tuple = TupleUtils.createTuple(fieldSerdes, (Object[]) fields);
LSMBTreeTupleWriter matterTupleWriter = new LSMBTreeTupleWriter(typeTraits, numKeyFields, false, false);
LSMBTreeTupleWriter antimatterTupleWriter =
new LSMBTreeTupleWriter(typeTraits, numKeyFields, true, false);
@@ -98,7 +99,8 @@
}
// Tuple reference should work for both matter and antimatter tuples (doesn't matter which factory creates it).
- LSMBTreeTupleReference lsmBTreeTuple = (LSMBTreeTupleReference) matterTupleWriter.createTupleReference();
+ LSMBTreeTupleReference lsmBTreeTuple =
+ (LSMBTreeTupleReference) matterTupleWriter.createTupleReference();
// Use LSMBTree tuple reference to interpret the written tuples.
// Repeat the block inside to test that repeated resetting to matter/antimatter tuples works.
@@ -145,7 +147,8 @@
}
}
- private void checkTuple(LSMBTreeTupleReference tuple, int expectedFieldCount, boolean expectedAntimatter, ISerializerDeserializer[] fieldSerdes, Object[] expectedFields) throws HyracksDataException {
+ private void checkTuple(LSMBTreeTupleReference tuple, int expectedFieldCount, boolean expectedAntimatter,
+ ISerializerDeserializer[] fieldSerdes, Object[] expectedFields) throws HyracksDataException {
assertEquals(expectedFieldCount, tuple.getFieldCount());
assertEquals(expectedAntimatter, tuple.isAntimatter());
Object[] deserMatterTuple = TupleUtils.deserializeTuple(tuple, fieldSerdes);
@@ -156,22 +159,20 @@
@Test
public void testLSMBTreeTuple() throws HyracksDataException {
- ISerializerDeserializer[] intFields = new IntegerSerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] intFields =
+ new IntegerSerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
testLSMBTreeTuple(intFields);
- ISerializerDeserializer[] stringFields = new ISerializerDeserializer[] {
+ ISerializerDeserializer[] stringFields = new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() };
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
testLSMBTreeTuple(stringFields);
- ISerializerDeserializer[] mixedFields = new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] mixedFields = new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE };
testLSMBTreeTuple(mixedFields);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/LSMComponentFilterReferenceTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/LSMComponentFilterReferenceTest.java
index af37d80..6ec661e 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/LSMComponentFilterReferenceTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/LSMComponentFilterReferenceTest.java
@@ -49,7 +49,7 @@
byte[] serFilter = filter.getByteArray();
LSMComponentFilterReference deserFilter = new LSMComponentFilterReference(
new TypeAwareTupleWriter((new ITypeTraits[] { IntegerPointable.TYPE_TRAITS })));
- deserFilter.set(serFilter,0,20);
+ deserFilter.set(serFilter, 0, 20);
Assert.assertTrue(deserFilter.isMaxTupleSet() && deserFilter.isMinTupleSet());
Assert.assertEquals(
TupleUtils.deserializeTuple(deserFilter.getMinTuple(),
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/VirtualFreePageManagerTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/VirtualFreePageManagerTest.java
index f4ec55d..fd85824 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/VirtualFreePageManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/VirtualFreePageManagerTest.java
@@ -35,7 +35,8 @@
private final int NUM_PAGES = 100;
- private void testInMemoryFreePageManager(VirtualFreePageManager virtualFreePageManager) throws HyracksDataException {
+ private void testInMemoryFreePageManager(VirtualFreePageManager virtualFreePageManager)
+ throws HyracksDataException {
// The first two pages are reserved for the BTree's metadata page and
// root page.
int capacity = NUM_PAGES - 2;
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTestDriver.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTestDriver.java
index 9ae9940..bfe70e6 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTestDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTestDriver.java
@@ -40,6 +40,7 @@
}
public DataGenThread createDatagenThread(int numThreads, int numBatches, int batchSize) {
- return new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, fieldGens, RANDOM_SEED, 2 * numThreads);
+ return new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, fieldGens, RANDOM_SEED,
+ 2 * numThreads);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramTokenizerTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramTokenizerTest.java
index 6e764c3..b536ef2 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramTokenizerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramTokenizerTest.java
@@ -75,8 +75,8 @@
void runTestNGramTokenizerWithCountedHashedUTF8Tokens(boolean prePost) throws IOException {
HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
- NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, false,
- false, tokenFactory);
+ NGramUTF8StringBinaryTokenizer tokenizer =
+ new NGramUTF8StringBinaryTokenizer(gramLength, prePost, false, false, tokenFactory);
tokenizer.reset(inputBuffer, 0, inputBuffer.length);
ArrayList<String> expectedGrams = new ArrayList<String>();
@@ -124,8 +124,8 @@
void runTestNGramTokenizerWithHashedUTF8Tokens(boolean prePost) throws IOException {
HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
- NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false,
- tokenFactory);
+ NGramUTF8StringBinaryTokenizer tokenizer =
+ new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false, tokenFactory);
tokenizer.reset(inputBuffer, 0, inputBuffer.length);
ArrayList<String> expectedGrams = new ArrayList<String>();
@@ -164,8 +164,8 @@
void runTestNGramTokenizerWithUTF8Tokens(boolean prePost) throws IOException {
UTF8NGramTokenFactory tokenFactory = new UTF8NGramTokenFactory();
- NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false,
- tokenFactory);
+ NGramUTF8StringBinaryTokenizer tokenizer =
+ new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false, tokenFactory);
tokenizer.reset(inputBuffer, 0, inputBuffer.length);
ArrayList<String> expectedGrams = new ArrayList<String>();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/WordTokenizerTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/WordTokenizerTest.java
index 78ba6a3..11a0b02 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/WordTokenizerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/WordTokenizerTest.java
@@ -108,8 +108,8 @@
public void testWordTokenizerWithCountedHashedUTF8Tokens() throws IOException {
HashedUTF8WordTokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
- DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(false, false,
- tokenFactory);
+ DelimitedUTF8StringBinaryTokenizer tokenizer =
+ new DelimitedUTF8StringBinaryTokenizer(false, false, tokenFactory);
tokenizer.reset(inputBuffer, 0, inputBuffer.length);
@@ -140,8 +140,8 @@
public void testWordTokenizerWithHashedUTF8Tokens() throws IOException {
HashedUTF8WordTokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
- DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false,
- tokenFactory);
+ DelimitedUTF8StringBinaryTokenizer tokenizer =
+ new DelimitedUTF8StringBinaryTokenizer(true, false, tokenFactory);
tokenizer.reset(inputBuffer, 0, inputBuffer.length);
@@ -172,8 +172,8 @@
public void testWordTokenizerWithUTF8Tokens() throws IOException {
UTF8WordTokenFactory tokenFactory = new UTF8WordTokenFactory();
- DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false,
- tokenFactory);
+ DelimitedUTF8StringBinaryTokenizer tokenizer =
+ new DelimitedUTF8StringBinaryTokenizer(true, false, tokenFactory);
tokenizer.reset(inputBuffer, 0, inputBuffer.length);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/IOManagerPathTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/IOManagerPathTest.java
index 3d6e3ef..e2a875b 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/IOManagerPathTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/IOManagerPathTest.java
@@ -38,8 +38,8 @@
public void testPrefixNames() throws HyracksDataException {
IODeviceHandle shorter = new IODeviceHandle(new File("/tmp/tst/1"), "storage");
IODeviceHandle longer = new IODeviceHandle(new File("/tmp/tst/11"), "storage");
- IOManager ioManager = new IOManager(Arrays.asList(new IODeviceHandle[] { shorter, longer }),
- new DefaultDeviceResolver());
+ IOManager ioManager =
+ new IOManager(Arrays.asList(new IODeviceHandle[] { shorter, longer }), new DefaultDeviceResolver());
FileReference f = ioManager.resolveAbsolutePath("/tmp/tst/11/storage/Foo_idx_foo/my_btree");
Assert.assertEquals("/tmp/tst/11/storage/Foo_idx_foo/my_btree", f.getAbsolutePath());
}
@@ -48,8 +48,8 @@
public void testDuplicates() throws HyracksDataException {
IODeviceHandle first = new IODeviceHandle(new File("/tmp/tst/1"), "storage");
IODeviceHandle second = new IODeviceHandle(new File("/tmp/tst/1"), "storage");
- IOManager ioManager = new IOManager(Arrays.asList(new IODeviceHandle[] { first, second }),
- new DefaultDeviceResolver());
+ IOManager ioManager =
+ new IOManager(Arrays.asList(new IODeviceHandle[] { first, second }), new DefaultDeviceResolver());
}
@After
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/StorageUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/StorageUtil.java
index dbfe6f9..e4969f0 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/StorageUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/StorageUtil.java
@@ -166,7 +166,7 @@
return bytes + " B";
}
final int baseValue = (63 - Long.numberOfLeadingZeros(bytes)) / 10;
- final char bytePrefix = " kMGTPE" .charAt(baseValue);
+ final char bytePrefix = " kMGTPE".charAt(baseValue);
final long divisor = 1L << (baseValue * 10);
if (bytes % divisor == 0) {
return String.format("%d %sB", bytes / divisor, bytePrefix);
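
Aside: the StorageUtil hunk above reflows a humanized byte-size helper; the expression `(63 - Long.numberOfLeadingZeros(bytes)) / 10` picks the largest power of 1024 that fits the value. A minimal standalone sketch of the same arithmetic (the `ByteSizeDemo` / `toHumanReadable` names are illustrative, not the project's API):

    public final class ByteSizeDemo {
        // Each power of 1024 spans 10 bits, so the index of the highest set
        // bit divided by 10 selects the prefix: k, M, G, T, P, E.
        static String toHumanReadable(long bytes) {
            if (bytes < 1024) {
                return bytes + " B";
            }
            final int baseValue = (63 - Long.numberOfLeadingZeros(bytes)) / 10;
            final char bytePrefix = " kMGTPE".charAt(baseValue);
            final long divisor = 1L << (baseValue * 10);
            return String.format("%.2f %sB", (double) bytes / divisor, bytePrefix);
        }

        public static void main(String[] args) {
            System.out.println(toHumanReadable(1536));     // 1.50 kB
            System.out.println(toHumanReadable(3L << 30)); // 3.00 GB
        }
    }
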
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Parser.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Parser.java
index 257daee..9f527e9 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Parser.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Parser.java
@@ -152,7 +152,7 @@
break;
}
- j++; // text.charAt(j) is now at some base64 char, so +1 to make it the size
+ j++; // text.charAt(j) is now at some base64 char, so +1 to make it the size
int padSize = length - j;
if (padSize > 2) // something is wrong with base64. be safe and go with the upper bound
{
@@ -180,7 +180,7 @@
break;
}
- j++; // text.charAt(j) is now at some base64 char, so +1 to make it the size
+ j++; // text.charAt(j) is now at some base64 char, so +1 to make it the size
int padSize = length - j;
if (padSize > 2) // something is wrong with base64. be safe and go with the upper bound
{
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Printer.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Printer.java
index 0e1c078..65557b1 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Printer.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Printer.java
@@ -34,12 +34,8 @@
int i;
for (i = offset; remaining >= 3; remaining -= 3, i += 3) {
appendable.append(encode(input[i] >> 2));
- appendable.append(encode(
- ((input[i] & 0x3) << 4)
- | ((input[i + 1] >> 4) & 0xF)));
- appendable.append(encode(
- ((input[i + 1] & 0xF) << 2)
- | ((input[i + 2] >> 6) & 0x3)));
+ appendable.append(encode(((input[i] & 0x3) << 4) | ((input[i + 1] >> 4) & 0xF)));
+ appendable.append(encode(((input[i + 1] & 0xF) << 2) | ((input[i + 2] >> 6) & 0x3)));
appendable.append(encode(input[i + 2] & 0x3F));
}
// encode when exactly 1 element (left) to encode
@@ -52,8 +48,7 @@
// encode when exactly 2 elements (left) to encode
if (remaining == 2) {
appendable.append(encode(input[i] >> 2));
- appendable.append(encode(((input[i] & 0x3) << 4)
- | ((input[i + 1] >> 4) & 0xF)));
+ appendable.append(encode(((input[i] & 0x3) << 4) | ((input[i + 1] >> 4) & 0xF)));
appendable.append(encode((input[i + 1] & 0xF) << 2));
appendable.append('=');
}
@@ -73,12 +68,8 @@
int i;
for (i = offset; remaining >= 3; remaining -= 3, i += 3) {
buf[ptr++] = encode(input[i] >> 2);
- buf[ptr++] = encode(
- ((input[i] & 0x3) << 4)
- | ((input[i + 1] >> 4) & 0xF));
- buf[ptr++] = encode(
- ((input[i + 1] & 0xF) << 2)
- | ((input[i + 2] >> 6) & 0x3));
+ buf[ptr++] = encode(((input[i] & 0x3) << 4) | ((input[i + 1] >> 4) & 0xF));
+ buf[ptr++] = encode(((input[i + 1] & 0xF) << 2) | ((input[i + 2] >> 6) & 0x3));
buf[ptr++] = encode(input[i + 2] & 0x3F);
}
// encode when exactly 1 element (left) to encode
@@ -91,8 +82,7 @@
// encode when exactly 2 elements (left) to encode
if (remaining == 2) {
buf[ptr++] = encode(input[i] >> 2);
- buf[ptr++] = encode(((input[i] & 0x3) << 4)
- | ((input[i + 1] >> 4) & 0xF));
+ buf[ptr++] = encode(((input[i] & 0x3) << 4) | ((input[i + 1] >> 4) & 0xF));
buf[ptr++] = encode((input[i + 1] & 0xF) << 2);
buf[ptr++] = '=';
}
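
Aside: the expressions joined onto single lines in the Base64Printer hunk pack three input bytes into four six-bit alphabet indices. A self-contained sketch of that packing, under the standard base64 alphabet (`Base64Demo` and `encodeBlock` are illustrative stand-ins for the class's `encode` helper):

    public final class Base64Demo {
        private static final char[] ALPHABET =
                "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".toCharArray();

        // Same bit packing as the hunk above: 3 bytes become 4 sextets.
        static String encodeBlock(byte b0, byte b1, byte b2) {
            return new String(new char[] {
                    ALPHABET[(b0 >> 2) & 0x3F],
                    ALPHABET[((b0 & 0x3) << 4) | ((b1 >> 4) & 0xF)],
                    ALPHABET[((b1 & 0xF) << 2) | ((b2 >> 6) & 0x3)],
                    ALPHABET[b2 & 0x3F] });
        }

        public static void main(String[] args) {
            System.out.println(encodeBlock((byte) 'M', (byte) 'a', (byte) 'n')); // TWFu
        }
    }
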
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/HexParser.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/HexParser.java
index ba7276b..46bc0a4 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/HexParser.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/HexParser.java
@@ -21,9 +21,7 @@
public class HexParser {
public static boolean isValidHexChar(char c) {
- if (c >= '0' && c <= '9'
- || c >= 'a' && c <= 'f'
- || c >= 'A' && c <= 'F') {
+ if (c >= '0' && c <= '9' || c >= 'a' && c <= 'f' || c >= 'A' && c <= 'F') {
return true;
}
return false;
@@ -79,19 +77,17 @@
}
}
- public static void generateByteArrayFromHexString(char[] input, int start, int length, byte[] output,
- int offset) {
+ public static void generateByteArrayFromHexString(char[] input, int start, int length, byte[] output, int offset) {
for (int i = 0; i < length; i += 2) {
- output[offset + i / 2] = (byte) ((getValueFromValidHexChar(input[start + i]) << 4) +
- getValueFromValidHexChar(input[start + i + 1]));
+ output[offset + i / 2] = (byte) ((getValueFromValidHexChar(input[start + i]) << 4)
+ + getValueFromValidHexChar(input[start + i + 1]));
}
}
- public static void generateByteArrayFromHexString(byte[] input, int start, int length, byte[] output,
- int offset) {
+ public static void generateByteArrayFromHexString(byte[] input, int start, int length, byte[] output, int offset) {
for (int i = 0; i < length; i += 2) {
- output[offset + i / 2] = (byte) ((getValueFromValidHexChar((char) input[start + i]) << 4) +
- getValueFromValidHexChar((char) input[start + i + 1]));
+ output[offset + i / 2] = (byte) ((getValueFromValidHexChar((char) input[start + i]) << 4)
+ + getValueFromValidHexChar((char) input[start + i + 1]));
}
}
}
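
Aside: the HexParser hunk rewraps the `(hi << 4) + lo` pairing of two hex digits into one byte. A minimal sketch under the same character-range assumptions (`hexValue` is a hypothetical stand-in for `getValueFromValidHexChar`):

    public final class HexDemo {
        // Maps '0'-'9', 'a'-'f', 'A'-'F' to 0-15; input assumed pre-validated.
        static int hexValue(char c) {
            if (c >= '0' && c <= '9') {
                return c - '0';
            }
            if (c >= 'a' && c <= 'f') {
                return c - 'a' + 10;
            }
            return c - 'A' + 10;
        }

        public static void main(String[] args) {
            char hi = '7', lo = 'f';
            byte b = (byte) ((hexValue(hi) << 4) + hexValue(lo));
            System.out.println(b); // 127
        }
    }
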
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java
index cd654d7..5d69448 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java
@@ -541,8 +541,8 @@
if (((char2 & 0xC0) != 0x80) || ((char3 & 0xC0) != 0x80)) {
throw new UTFDataFormatException("malformed input around byte " + (count - 1));
}
- chararr[chararr_count++] = (char) (((c & 0x0F) << 12) | ((char2 & 0x3F) << 6)
- | ((char3 & 0x3F) << 0));
+ chararr[chararr_count++] =
+ (char) (((c & 0x0F) << 12) | ((char2 & 0x3F) << 6) | ((char3 & 0x3F) << 0));
break;
default:
/* 10xx xxxx, 1111 xxxx */
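
Aside: the UTF8StringUtil hunk rewraps the three-byte decode case (`1110xxxx 10xxxxxx 10xxxxxx` into a 16-bit char). The same bit layout as a runnable sketch (`decodeThreeByte` is an illustrative name; the real method additionally validates continuation bytes and bounds):

    public final class Utf8Demo {
        // ((c & 0x0F) << 12) | ((char2 & 0x3F) << 6) | (char3 & 0x3F)
        static char decodeThreeByte(int c, int char2, int char3) {
            return (char) (((c & 0x0F) << 12) | ((char2 & 0x3F) << 6) | (char3 & 0x3F));
        }

        public static void main(String[] args) {
            // U+20AC (euro sign) encodes as E2 82 AC in UTF-8.
            System.out.println(decodeThreeByte(0xE2, 0x82, 0xAC)); // €
        }
    }
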
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/trace/Tracer.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/trace/Tracer.java
index 5313514..ea3793d 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/trace/Tracer.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/trace/Tracer.java
@@ -38,8 +38,8 @@
protected static final Level TRACE_LOG_LEVEL = Level.INFO;
protected static final String CAT = "Tracer";
- protected static final ThreadLocal<DateFormat> DATE_FORMAT = ThreadLocal
- .withInitial(() -> new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX"));
+ protected static final ThreadLocal<DateFormat> DATE_FORMAT =
+ ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX"));
protected final Logger traceLog;
protected long categories;
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/MathTest.java b/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/MathTest.java
index c3443c8..a8762cd 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/MathTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/MathTest.java
@@ -32,9 +32,9 @@
Random random = new Random(System.currentTimeMillis());
for (int i = 0; i < 31; i++) {
assertTrue(MathUtil.log2Floor((int) Math.pow(2, i)) == i);
- for(int x = 0; x < 10; x++){
+ for (int x = 0; x < 10; x++) {
float extra = random.nextFloat();
- while (extra >= 1.0){
+ while (extra >= 1.0) {
extra = random.nextFloat();
}
assertTrue(MathUtil.log2Floor((int) Math.pow(2, i + extra)) == i);
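
Aside: the MathTest hunk exercises `MathUtil.log2Floor` over values of the form 2^(i+f) with fractional f, asserting the result stays i. One common way to implement such a floor-log (an assumption for illustration, not necessarily MathUtil's actual code):

    public final class Log2Demo {
        // floor(log2(n)) is the index of the highest set bit of n (n > 0).
        static int log2Floor(int n) {
            return 31 - Integer.numberOfLeadingZeros(n);
        }

        public static void main(String[] args) {
            System.out.println(log2Floor(1));  // 0
            System.out.println(log2Floor(12)); // 3
        }
    }
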
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/string/UTF8StringUtilTest.java b/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/string/UTF8StringUtilTest.java
index f200384..5a614f0 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/string/UTF8StringUtilTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/string/UTF8StringUtilTest.java
@@ -92,7 +92,11 @@
return r2 == 0;
}
- enum OPTION {STANDARD, RAW_BYTE, LOWERCASE}
+ enum OPTION {
+ STANDARD,
+ RAW_BYTE,
+ LOWERCASE
+ }
public void testCompare(String str1, String str2, OPTION option) throws IOException {
byte[] buffer1 = writeStringToBytes(str1);
diff --git a/hyracks-fullstack/pom.xml b/hyracks-fullstack/pom.xml
index e03fb8a..a82355d 100644
--- a/hyracks-fullstack/pom.xml
+++ b/hyracks-fullstack/pom.xml
@@ -377,6 +377,21 @@
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>net.revelc.code.formatter</groupId>
+ <artifactId>formatter-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>${source-format.goal}</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <configFile>${root.dir}/AsterixCodeFormatProfile.xml</configFile>
+ <skipFormatting>${source-format.skip}</skipFormatting>
+ </configuration>
+ </plugin>
</plugins>
<pluginManagement>
<plugins>
@@ -523,31 +538,6 @@
</properties>
</profile>
<profile>
- <id>source-format</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <build>
- <plugins>
- <plugin>
- <groupId>net.revelc.code.formatter</groupId>
- <artifactId>formatter-maven-plugin</artifactId>
- <executions>
- <execution>
- <goals>
- <goal>${source-format.goal}</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <configFile>${root.dir}/AsterixCodeFormatProfile.xml</configFile>
- <skipFormatting>${source-format.skip}</skipFormatting>
- </configuration>
- </plugin>
- </plugins>
- </build>
- </profile>
- <profile>
<id>skip-assembly</id>
<activation>
<file>
diff --git a/hyracks-fullstack/src/main/assembly/source.xml b/hyracks-fullstack/src/main/assembly/source.xml
index f5dd61f..4d668ce 100644
--- a/hyracks-fullstack/src/main/assembly/source.xml
+++ b/hyracks-fullstack/src/main/assembly/source.xml
@@ -24,24 +24,9 @@
<directory>${project.basedir}</directory>
<outputDirectory>/</outputDirectory>
<useDefaultExcludes>true</useDefaultExcludes>
- <includes>
- <include>**/LICENSE</include>
- <include>**/NOTICE</include>
- <include>**/README</include>
- <include>**/src/**</include>
- <include>**/results/**</include>
- <include>**/pom.xml</include>
- <include>**/build*.xml</include>
- <include>**/findbugs*.xml</include>
- <include>**/*.tbl</include>
- <include>**/*.tsv</include>
- <include>**/*.js</include>
- <include>**/*.txt</include>
- <include>**/*.piglet</include>
- <include>**/*.ddl</include>
- </includes>
<excludes>
- <exclude>**/${project.build.directory}/**</exclude>
+ <exclude>${project.build.directory}/**</exclude>
+ <exclude>release.properties</exclude>
</excludes>
</fileSet>
</fileSets>