[ASTERIXDB-2256] Reformat sources using code format template
Change-Id: I4faa141c1a8c9700d5e9ac50b839acc9d1eede73
Reviewed-on: https://asterix-gerrit.ics.uci.edu/2310
Tested-by: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Integration-Tests: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Contrib: Jenkins <jenkins@fulliautomatix.ics.uci.edu>
Reviewed-by: Murtadha Hubail <mhubail@apache.org>
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveManager.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveManager.java
index aa9ac98..f9aef4c 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveManager.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/ActiveManager.java
@@ -57,7 +57,7 @@
private volatile boolean shutdown;
public ActiveManager(ExecutorService executor, String nodeId, long activeMemoryBudget, int frameSize,
- INCServiceContext serviceCtx) throws HyracksDataException {
+ INCServiceContext serviceCtx) throws HyracksDataException {
this.executor = executor;
this.nodeId = nodeId;
this.activeFramePool = new ConcurrentFramePool(nodeId, activeMemoryBudget, frameSize);
@@ -115,18 +115,17 @@
if (runtime == null) {
LOGGER.warn("Request stats of a runtime that is not registered " + runtimeId);
// Send a failure message
- ((NodeControllerService) serviceCtx.getControllerService())
- .sendApplicationMessageToCC(message.getCcId(),
- JavaSerializationUtils
- .serialize(new ActiveStatsResponse(reqId, null,
- new RuntimeDataException(ErrorCode.ACTIVE_MANAGER_INVALID_RUNTIME,
- runtimeId.toString()))), null);
+ ((NodeControllerService) serviceCtx.getControllerService()).sendApplicationMessageToCC(
+ message.getCcId(),
+ JavaSerializationUtils.serialize(new ActiveStatsResponse(reqId, null, new RuntimeDataException(
+ ErrorCode.ACTIVE_MANAGER_INVALID_RUNTIME, runtimeId.toString()))),
+ null);
return;
}
String stats = runtime.getStats();
ActiveStatsResponse response = new ActiveStatsResponse(reqId, stats, null);
- ((NodeControllerService) serviceCtx.getControllerService())
- .sendApplicationMessageToCC(message.getCcId(), JavaSerializationUtils.serialize(response), null);
+ ((NodeControllerService) serviceCtx.getControllerService()).sendApplicationMessageToCC(message.getCcId(),
+ JavaSerializationUtils.serialize(response), null);
} catch (Exception e) {
throw HyracksDataException.create(e);
}
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/DeployedJobService.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/DeployedJobService.java
index b5b07ff..e12c5ca 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/DeployedJobService.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/DeployedJobService.java
@@ -94,8 +94,7 @@
LOGGER.log(Level.INFO,
"Deployed Job execution completed for " + entityId.getExtensionName() + " " + entityId.getDataverse()
- + "."
- + entityId.getEntityName() + ". Took " + executionMilliseconds + " milliseconds ");
+ + "." + entityId.getEntityName() + ". Took " + executionMilliseconds + " milliseconds ");
return executionMilliseconds;
diff --git a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/EntityId.java b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/EntityId.java
index 9e20e2f..5fe26bf 100644
--- a/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/EntityId.java
+++ b/asterixdb/asterix-active/src/main/java/org/apache/asterix/active/EntityId.java
@@ -55,8 +55,8 @@
return true;
}
EntityId other = (EntityId) o;
- return Objects.equals(other.dataverse, dataverse) && Objects.equals(other.entityName, entityName) &&
- Objects.equals(other.extensionName, extensionName);
+ return Objects.equals(other.dataverse, dataverse) && Objects.equals(other.entityName, entityName)
+ && Objects.equals(other.extensionName, extensionName);
}
@Override
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/BTreeSearchPOperator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/BTreeSearchPOperator.java
index 2fd9079..11b86cb 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/BTreeSearchPOperator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/BTreeSearchPOperator.java
@@ -123,8 +123,8 @@
// By nature, LEFT_OUTER_UNNEST_MAP should generate null values for non-matching tuples.
boolean retainMissing = op.getOperatorTag() == LogicalOperatorTag.LEFT_OUTER_UNNEST_MAP;
Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> btreeSearch = metadataProvider.buildBtreeRuntime(
- builder.getJobSpec(), opSchema, typeEnv, context, jobGenParams.getRetainInput(), retainMissing,
- dataset, jobGenParams.getIndexName(), lowKeyIndexes, highKeyIndexes, jobGenParams.isLowKeyInclusive(),
+ builder.getJobSpec(), opSchema, typeEnv, context, jobGenParams.getRetainInput(), retainMissing, dataset,
+ jobGenParams.getIndexName(), lowKeyIndexes, highKeyIndexes, jobGenParams.isLowKeyInclusive(),
jobGenParams.isHighKeyInclusive(), propagateFilter, minFilterFieldIndexes, maxFilterFieldIndexes);
builder.contributeHyracksOperator(unnestMap, btreeSearch.first);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitPOperator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitPOperator.java
index 09092ff..f06fe45 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitPOperator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/algebra/operators/physical/CommitPOperator.java
@@ -84,8 +84,7 @@
int[] primaryKeyFields = JobGenHelper.variablesToFieldIndexes(primaryKeyLogicalVars, inputSchemas[0]);
//get dataset splits
- IPushRuntimeFactory runtime = dataset.getCommitRuntimeFactory(metadataProvider, primaryKeyFields,
- isSink);
+ IPushRuntimeFactory runtime = dataset.getCommitRuntimeFactory(metadataProvider, primaryKeyFields, isSink);
builder.contributeMicroOperator(op, runtime, recDesc);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/IRuleSetFactory.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/IRuleSetFactory.java
index dabac3d..2300e4a 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/IRuleSetFactory.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/compiler/provider/IRuleSetFactory.java
@@ -33,14 +33,12 @@
* @throws AlgebricksException
*/
public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getLogicalRewrites(
- ICcApplicationContext appCtx)
- throws AlgebricksException;
+ ICcApplicationContext appCtx) throws AlgebricksException;
/**
* @return the physical rewrites
*/
public List<Pair<AbstractRuleController, List<IAlgebraicRewriteRule>>> getPhysicalRewrites(
- ICcApplicationContext appCtx)
- throws AlgebricksException;
+ ICcApplicationContext appCtx) throws AlgebricksException;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/AnalysisUtil.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/AnalysisUtil.java
index 8dca64b..93c7d79 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/AnalysisUtil.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/base/AnalysisUtil.java
@@ -110,8 +110,7 @@
if (expr.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
AbstractFunctionCallExpression fc = (AbstractFunctionCallExpression) expr;
FunctionIdentifier fid = fc.getFunctionIdentifier();
- if (fid.equals(BuiltinFunctions.FIELD_ACCESS_BY_INDEX)
- || fid.equals(BuiltinFunctions.FIELD_ACCESS_BY_NAME)
+ if (fid.equals(BuiltinFunctions.FIELD_ACCESS_BY_INDEX) || fid.equals(BuiltinFunctions.FIELD_ACCESS_BY_NAME)
|| fid.equals(BuiltinFunctions.FIELD_ACCESS_NESTED)) {
return true;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixIntroduceGroupByCombinerRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixIntroduceGroupByCombinerRule.java
index ca54904..5a70d3c 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixIntroduceGroupByCombinerRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/AsterixIntroduceGroupByCombinerRule.java
@@ -47,14 +47,14 @@
SelectOperator selectNonSystemNull;
if (aggregateVarsProducedByCombiner.size() == 1) {
- ILogicalExpression isSystemNullTest = new ScalarFunctionCallExpression(finfoEq,
- new MutableObject<ILogicalExpression>(new VariableReferenceExpression(
- aggregateVarsProducedByCombiner.get(0))));
+ ILogicalExpression isSystemNullTest =
+ new ScalarFunctionCallExpression(finfoEq, new MutableObject<ILogicalExpression>(
+ new VariableReferenceExpression(aggregateVarsProducedByCombiner.get(0))));
IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
- ScalarFunctionCallExpression nonSystemNullTest = new ScalarFunctionCallExpression(finfoNot,
- new MutableObject<ILogicalExpression>(isSystemNullTest));
- selectNonSystemNull = new SelectOperator(new MutableObject<ILogicalExpression>(nonSystemNullTest), false,
- null);
+ ScalarFunctionCallExpression nonSystemNullTest =
+ new ScalarFunctionCallExpression(finfoNot, new MutableObject<ILogicalExpression>(isSystemNullTest));
+ selectNonSystemNull =
+ new SelectOperator(new MutableObject<ILogicalExpression>(nonSystemNullTest), false, null);
} else {
List<Mutable<ILogicalExpression>> isSystemNullTestList = new ArrayList<Mutable<ILogicalExpression>>();
for (LogicalVariable aggVar : aggregateVarsProducedByCombiner) {
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java
index 957e164..5cf6769 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByHandleFieldAccessRule.java
@@ -62,8 +62,8 @@
private static void byNameToByHandle(AssignOperator fieldAccessOp, IOptimizationContext context) {
Mutable<ILogicalOperator> opUnder = fieldAccessOp.getInputs().get(0);
- AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) fieldAccessOp.getExpressions().get(0)
- .getValue();
+ AbstractFunctionCallExpression fce =
+ (AbstractFunctionCallExpression) fieldAccessOp.getExpressions().get(0).getValue();
ILogicalExpression a1 = fce.getArguments().get(0).getValue();
VariableReferenceExpression x;
@@ -84,16 +84,16 @@
// let $t := type-of(x)
LogicalVariable t = context.newVar();
- AbstractFunctionCallExpression typeOf = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.TYPE_OF));
+ AbstractFunctionCallExpression typeOf =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.TYPE_OF));
typeOf.getArguments().add(new MutableObject<ILogicalExpression>(x));
AssignOperator typAssign = new AssignOperator(t, new MutableObject<ILogicalExpression>(typeOf));
typAssign.getInputs().add(opUnder);
// let $w := get-handle($t, path-expression)
LogicalVariable w = context.newVar();
- AbstractFunctionCallExpression getHandle = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.GET_HANDLE));
+ AbstractFunctionCallExpression getHandle =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.GET_HANDLE));
getHandle.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(t)));
// the accessed field
getHandle.getArguments().add(new MutableObject<ILogicalExpression>(fce.getArguments().get(1).getValue()));
@@ -101,8 +101,8 @@
handleAssign.getInputs().add(new MutableObject<ILogicalOperator>(typAssign));
// let $y := get-data(x, $w)
- AbstractFunctionCallExpression getData = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.GET_DATA));
+ AbstractFunctionCallExpression getData =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.GET_DATA));
VariableReferenceExpression ref2 = new VariableReferenceExpression(x.getVariableReference());
getData.getArguments().add(new MutableObject<ILogicalExpression>(ref2));
getData.getArguments().add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(w)));
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
index 1ace6fa..19cd4b6 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ByNameToByIndexFieldAccessRule.java
@@ -148,8 +148,8 @@
if (k < 0) {
return null;
}
- return new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX), fce.getArguments().get(0),
+ return new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX),
+ fce.getArguments().get(0),
new MutableObject<>(new ConstantExpression(new AsterixConstantValue(new AInt32(k)))));
}
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java
index cf926d1..6cc87ad 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CancelUnnestWithNestedListifyRule.java
@@ -186,8 +186,8 @@
return false;
}
- AbstractLogicalOperator nestedPlanRoot = (AbstractLogicalOperator) gby.getNestedPlans().get(0).getRoots().get(0)
- .getValue();
+ AbstractLogicalOperator nestedPlanRoot =
+ (AbstractLogicalOperator) gby.getNestedPlans().get(0).getRoots().get(0).getValue();
if (nestedPlanRoot.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
return false;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CountVarToCountOneRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CountVarToCountOneRule.java
index 9c90b7c..46c441a 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CountVarToCountOneRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/CountVarToCountOneRule.java
@@ -40,7 +40,8 @@
public class CountVarToCountOneRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -80,7 +81,8 @@
if (exp3.getExpressionTag() != LogicalExpressionTag.VARIABLE) {
return false;
}
- if (((AbstractLogicalOperator) agg.getInputs().get(0).getValue()).getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
+ if (((AbstractLogicalOperator) agg.getInputs().get(0).getValue())
+ .getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
return false;
}
fun.getArguments().get(0).setValue(new ConstantExpression(new AsterixConstantValue(new AInt64(1L))));
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/DisjunctivePredicateToJoinRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/DisjunctivePredicateToJoinRule.java
index 7b84e98..85ee0ef 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/DisjunctivePredicateToJoinRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/DisjunctivePredicateToJoinRule.java
@@ -127,8 +127,7 @@
ILogicalExpression cExp = new ConstantExpression(new AsterixConstantValue(list));
Mutable<ILogicalExpression> mutCExp = new MutableObject<>(cExp);
- IFunctionInfo scanFctInfo = BuiltinFunctions
- .getAsterixFunctionInfo(BuiltinFunctions.SCAN_COLLECTION);
+ IFunctionInfo scanFctInfo = BuiltinFunctions.getAsterixFunctionInfo(BuiltinFunctions.SCAN_COLLECTION);
UnnestingFunctionCallExpression scanExp = new UnnestingFunctionCallExpression(scanFctInfo, mutCExp);
LogicalVariable scanVar = context.newVar();
UnnestOperator unn = new UnnestOperator(scanVar, new MutableObject<>(scanExp));
@@ -186,9 +185,10 @@
return asSelectOperator(op.getValue());
}
- private static AbstractFunctionCallExpression asFunctionCallExpression(ILogicalExpression ex, FunctionIdentifier fi) {
- AbstractFunctionCallExpression fctCall = (ex.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL ? (AbstractFunctionCallExpression) ex
- : null);
+ private static AbstractFunctionCallExpression asFunctionCallExpression(ILogicalExpression ex,
+ FunctionIdentifier fi) {
+ AbstractFunctionCallExpression fctCall = (ex.getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL
+ ? (AbstractFunctionCallExpression) ex : null);
if (fctCall != null && (fi == null || fctCall.getFunctionIdentifier().equals(fi)))
return fctCall;
return null;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FeedScanCollectionToUnnest.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FeedScanCollectionToUnnest.java
index 6c97858..fef2323 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FeedScanCollectionToUnnest.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FeedScanCollectionToUnnest.java
@@ -63,9 +63,9 @@
UnnestOperator unnest = (UnnestOperator) op;
ILogicalExpression unnestExpr = unnest.getExpressionRef().getValue();
if (needsScanCollection(unnestExpr, op)) {
- ILogicalExpression newExpr = new UnnestingFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.SCAN_COLLECTION),
- new MutableObject<ILogicalExpression>(unnestExpr));
+ ILogicalExpression newExpr =
+ new UnnestingFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.SCAN_COLLECTION),
+ new MutableObject<ILogicalExpression>(unnestExpr));
unnest.getExpressionRef().setValue(newExpr);
context.addToDontApplySet(this, op);
return true;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FixReplicateOperatorOutputsRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FixReplicateOperatorOutputsRule.java
index d9e58f3..c594ff8 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FixReplicateOperatorOutputsRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FixReplicateOperatorOutputsRule.java
@@ -96,10 +96,10 @@
// when done with the whole plan, check that all replicate operators have been fixed
// if there is one that has not been completely fixed, it indicates that one "old" parent couldn't be found
- if (op.getOperatorTag() == LogicalOperatorTag.DISTRIBUTE_RESULT ||
- op.getOperatorTag() == LogicalOperatorTag.SINK ||
- (op.getOperatorTag() == LogicalOperatorTag.DELEGATE_OPERATOR &&
- ((DelegateOperator) op).getDelegate() instanceof CommitOperator)) {
+ if (op.getOperatorTag() == LogicalOperatorTag.DISTRIBUTE_RESULT
+ || op.getOperatorTag() == LogicalOperatorTag.SINK
+ || (op.getOperatorTag() == LogicalOperatorTag.DELEGATE_OPERATOR
+ && ((DelegateOperator) op).getDelegate() instanceof CommitOperator)) {
for (Map.Entry<AbstractReplicateOperator, MutableInt> entry : replicateOperators.entrySet()) {
if (entry.getKey().getOutputs().size() != entry.getValue().getValue()) {
throw new AlgebricksException(ErrorCode.ASTERIX, ErrorCode.COMPILATION_FAILED_DUE_TO_REPLICATE_OP);
@@ -109,8 +109,8 @@
}
// rewrite/fix only replicate operators
- if ((op.getOperatorTag() != LogicalOperatorTag.REPLICATE && op.getOperatorTag() != LogicalOperatorTag.SPLIT) ||
- context.checkIfInDontApplySet(this, opRef.getValue())) {
+ if ((op.getOperatorTag() != LogicalOperatorTag.REPLICATE && op.getOperatorTag() != LogicalOperatorTag.SPLIT)
+ || context.checkIfInDontApplySet(this, opRef.getValue())) {
return false;
}
AbstractReplicateOperator replicateOperator = (AbstractReplicateOperator) op;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FullTextContainsParameterCheckRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FullTextContainsParameterCheckRule.java
index 8bd3d79..a93b732 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FullTextContainsParameterCheckRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FullTextContainsParameterCheckRule.java
@@ -80,7 +80,6 @@
return modified;
}
-
/**
* This visitor class handles actual checking and transformation.
*/
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyJoinRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyJoinRule.java
index 160e909..003cff0 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyJoinRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/FuzzyJoinRule.java
@@ -263,8 +263,8 @@
AqlPlusExpressionToPlanTranslator translator = new AqlPlusExpressionToPlanTranslator(metadataProvider, counter);
context.setVarCounter(counter.get());
- LogicalOperatorDeepCopyWithNewVariablesVisitor deepCopyVisitor = new LogicalOperatorDeepCopyWithNewVariablesVisitor(
- context, context);
+ LogicalOperatorDeepCopyWithNewVariablesVisitor deepCopyVisitor =
+ new LogicalOperatorDeepCopyWithNewVariablesVisitor(context, context);
translator.addOperatorToMetaScope(new Identifier("#LEFT"), leftInputOp);
translator.addVariableToMetaScope(new Identifier("$$LEFT"), leftInputVar);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/InjectTypeCastForSwitchCaseRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/InjectTypeCastForSwitchCaseRule.java
index bb529d4..27b2712 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/InjectTypeCastForSwitchCaseRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/InjectTypeCastForSwitchCaseRule.java
@@ -100,9 +100,9 @@
if (TypeResolverUtil.needsCast(producedType, type)) {
ILogicalExpression argExpr = argRef.getValue();
// Injects a cast call to cast the data type to the produced type of the switch-case function call.
- ScalarFunctionCallExpression castFunc = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.CAST_TYPE),
- new ArrayList<>(Collections.singletonList(new MutableObject<>(argExpr))));
+ ScalarFunctionCallExpression castFunc =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CAST_TYPE),
+ new ArrayList<>(Collections.singletonList(new MutableObject<>(argExpr))));
TypeCastUtils.setRequiredAndInputTypes(castFunc, producedType, type);
argRef.setValue(castFunc);
rewritten = true;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/InlineUnnestFunctionRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/InlineUnnestFunctionRule.java
index 09c98dd..9f1b968 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/InlineUnnestFunctionRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/InlineUnnestFunctionRule.java
@@ -65,8 +65,8 @@
return false;
}
UnnestOperator unnestOperator = (UnnestOperator) op1;
- AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) unnestOperator.getExpressionRef()
- .getValue();
+ AbstractFunctionCallExpression expr =
+ (AbstractFunctionCallExpression) unnestOperator.getExpressionRef().getValue();
//we only inline for the scan-collection function
if (expr.getFunctionIdentifier() != BuiltinFunctions.SCAN_COLLECTION) {
return false;
@@ -96,10 +96,11 @@
*/
private void inlineVariable(LogicalVariable usedVar, UnnestOperator unnestOp) throws AlgebricksException {
AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) unnestOp.getExpressionRef().getValue();
- List<Pair<AbstractFunctionCallExpression, Integer>> parentAndIndexList = new ArrayList<Pair<AbstractFunctionCallExpression, Integer>>();
+ List<Pair<AbstractFunctionCallExpression, Integer>> parentAndIndexList =
+ new ArrayList<Pair<AbstractFunctionCallExpression, Integer>>();
getParentFunctionExpression(usedVar, expr, parentAndIndexList);
- ILogicalExpression usedVarOrginExpr = findUsedVarOrigin(usedVar, unnestOp,
- (AbstractLogicalOperator) unnestOp.getInputs().get(0).getValue());
+ ILogicalExpression usedVarOrginExpr =
+ findUsedVarOrigin(usedVar, unnestOp, (AbstractLogicalOperator) unnestOp.getInputs().get(0).getValue());
if (usedVarOrginExpr != null) {
for (Pair<AbstractFunctionCallExpression, Integer> parentAndIndex : parentAndIndexList) {
//we only rewrite the top scan-collection function
@@ -160,8 +161,8 @@
}
} else {
for (Mutable<ILogicalOperator> child : currentOp.getInputs()) {
- ILogicalExpression expr = findUsedVarOrigin(usedVar, currentOp,
- (AbstractLogicalOperator) child.getValue());
+ ILogicalExpression expr =
+ findUsedVarOrigin(usedVar, currentOp, (AbstractLogicalOperator) child.getValue());
if (expr != null) {
ret = expr;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
index a24c6e0..c3e036e 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceAutogenerateIDRule.java
@@ -133,8 +133,8 @@
return false;
}
- List<String> pkFieldName = ((InternalDatasetDetails) dds.getDataset().getDatasetDetails()).getPrimaryKey()
- .get(0);
+ List<String> pkFieldName =
+ ((InternalDatasetDetails) dds.getDataset().getDatasetDetails()).getPrimaryKey().get(0);
ILogicalExpression rec0 = new VariableReferenceExpression(inputRecord);
ILogicalExpression rec1 = createPrimaryKeyRecordExpression(pkFieldName);
ILogicalExpression mergedRec = createRecordMergeFunction(rec0, rec1);
@@ -160,15 +160,15 @@
private ILogicalExpression createNotNullFunction(ILogicalExpression mergedRec) {
List<Mutable<ILogicalExpression>> args = new ArrayList<>();
args.add(new MutableObject<ILogicalExpression>(mergedRec));
- AbstractFunctionCallExpression notNullFn = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.CHECK_UNKNOWN), args);
+ AbstractFunctionCallExpression notNullFn =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CHECK_UNKNOWN), args);
return notNullFn;
}
private AbstractFunctionCallExpression createPrimaryKeyRecordExpression(List<String> pkFieldName) {
//Create lowest level of nested uuid
- AbstractFunctionCallExpression uuidFn = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_UUID));
+ AbstractFunctionCallExpression uuidFn =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_UUID));
List<Mutable<ILogicalExpression>> openRecordConsArgs = new ArrayList<>();
Mutable<ILogicalExpression> pkFieldNameExpression = new MutableObject<ILogicalExpression>(
new ConstantExpression(new AsterixConstantValue(new AString(pkFieldName.get(pkFieldName.size() - 1)))));
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastForExternalFunctionRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastForExternalFunctionRule.java
index bb39993..5644a6c 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastForExternalFunctionRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastForExternalFunctionRule.java
@@ -94,8 +94,8 @@
}
boolean castFlag = !IntroduceDynamicTypeCastRule.compatible(requiredRecordType, inputRecordType);
if (castFlag || checkUnknown) {
- AbstractFunctionCallExpression castFunc = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.CAST_TYPE));
+ AbstractFunctionCallExpression castFunc =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CAST_TYPE));
castFunc.getArguments().add(funcCallExpr.getArguments().get(iter1));
TypeCastUtils.setRequiredAndInputTypes(castFunc, requiredRecordType, inputRecordType);
funcCallExpr.getArguments().set(iter1, new MutableObject<>(castFunc));
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
index 075a65c..2695337 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceDynamicTypeCastRule.java
@@ -167,8 +167,7 @@
boolean cast = !compatible(requiredRecordType, inputRecordType);
if (checkUnknown) {
- recordVar = addWrapperFunction(requiredRecordType, recordVar, op, context,
- BuiltinFunctions.CHECK_UNKNOWN);
+ recordVar = addWrapperFunction(requiredRecordType, recordVar, op, context, BuiltinFunctions.CHECK_UNKNOWN);
}
if (cast) {
addWrapperFunction(requiredRecordType, recordVar, op, context, BuiltinFunctions.CAST_TYPE);
@@ -208,15 +207,15 @@
if (var.equals(recordVar)) {
/** insert an assign operator to call the function on-top-of the variable */
IAType actualType = (IAType) env.getVarType(var);
- AbstractFunctionCallExpression cast = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(fd));
+ AbstractFunctionCallExpression cast =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(fd));
cast.getArguments()
.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(var)));
/** enforce the required record type */
TypeCastUtils.setRequiredAndInputTypes(cast, requiredRecordType, actualType);
LogicalVariable newAssignVar = context.newVar();
- AssignOperator newAssignOperator = new AssignOperator(newAssignVar,
- new MutableObject<ILogicalExpression>(cast));
+ AssignOperator newAssignOperator =
+ new AssignOperator(newAssignVar, new MutableObject<ILogicalExpression>(cast));
newAssignOperator.getInputs().add(new MutableObject<ILogicalOperator>(op));
opRef.setValue(newAssignOperator);
context.computeAndSetTypeEnvironmentForOperator(newAssignOperator);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java
index 3ff2a1b..0c44ba0 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceEnforcedListTypeRule.java
@@ -48,7 +48,8 @@
public class IntroduceEnforcedListTypeRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
index 5518325..0fa4d30 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceMaterializationForInsertWithSelfScanRule.java
@@ -58,16 +58,16 @@
}
InsertDeleteUpsertOperator insertOp = (InsertDeleteUpsertOperator) op;
- boolean sameDataset = checkIfInsertAndScanDatasetsSame(op, ((DatasetDataSource) insertOp.getDataSource())
- .getDataset().getDatasetName());
+ boolean sameDataset = checkIfInsertAndScanDatasetsSame(op,
+ ((DatasetDataSource) insertOp.getDataSource()).getDataset().getDatasetName());
if (sameDataset) {
MaterializeOperator materializeOperator = new MaterializeOperator();
MaterializePOperator materializePOperator = new MaterializePOperator(true);
materializeOperator.setPhysicalOperator(materializePOperator);
- materializeOperator.getInputs().add(
- new MutableObject<ILogicalOperator>(insertOp.getInputs().get(0).getValue()));
+ materializeOperator.getInputs()
+ .add(new MutableObject<ILogicalOperator>(insertOp.getInputs().get(0).getValue()));
context.computeAndSetTypeEnvironmentForOperator(materializeOperator);
insertOp.getInputs().clear();
@@ -105,8 +105,7 @@
} else if (descendantOp.getOperatorTag() == LogicalOperatorTag.DATASOURCESCAN) {
DataSourceScanOperator dataSourceScanOp = (DataSourceScanOperator) descendantOp;
DataSource ds = (DataSource) dataSourceScanOp.getDataSource();
- if ((ds.getDatasourceType() == Type.INTERNAL_DATASET
- || ds.getDatasourceType() == Type.EXTERNAL_DATASET)
+ if ((ds.getDatasourceType() == Type.INTERNAL_DATASET || ds.getDatasourceType() == Type.EXTERNAL_DATASET)
&& ((DatasetDataSource) ds).getDataset().getDatasetName().compareTo(insertDatasetName) == 0) {
return true;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectAssignRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectAssignRule.java
index f655b24..1097d25 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectAssignRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceRapidFrameFlushProjectAssignRule.java
@@ -45,7 +45,8 @@
public class IntroduceRapidFrameFlushProjectAssignRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
index 3b6a959..6d53c13 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceSecondaryIndexInsertDeleteRule.java
@@ -107,12 +107,12 @@
return false;
}
/** find the record variable */
- InsertDeleteUpsertOperator primaryIndexModificationOp = (InsertDeleteUpsertOperator) op0.getInputs().get(0)
- .getValue();
+ InsertDeleteUpsertOperator primaryIndexModificationOp =
+ (InsertDeleteUpsertOperator) op0.getInputs().get(0).getValue();
boolean isBulkload = primaryIndexModificationOp.isBulkload();
ILogicalExpression newRecordExpr = primaryIndexModificationOp.getPayloadExpression().getValue();
- List<Mutable<ILogicalExpression>> newMetaExprs = primaryIndexModificationOp
- .getAdditionalNonFilteringExpressions();
+ List<Mutable<ILogicalExpression>> newMetaExprs =
+ primaryIndexModificationOp.getAdditionalNonFilteringExpressions();
LogicalVariable newRecordVar;
LogicalVariable newMetaVar = null;
@@ -120,8 +120,8 @@
* inputOp is the assign operator which extracts primary keys from the input
* variables (record or meta)
*/
- AbstractLogicalOperator inputOp = (AbstractLogicalOperator) primaryIndexModificationOp.getInputs().get(0)
- .getValue();
+ AbstractLogicalOperator inputOp =
+ (AbstractLogicalOperator) primaryIndexModificationOp.getInputs().get(0).getValue();
newRecordVar = getRecordVar(context, inputOp, newRecordExpr, 0);
if (newMetaExprs != null && !newMetaExprs.isEmpty()) {
if (newMetaExprs.size() > 1) {
@@ -280,8 +280,8 @@
IndexInsertDeleteUpsertOperator indexUpdate;
if (index.getIndexType() != IndexType.RTREE) {
// Create an expression per key
- Mutable<ILogicalExpression> filterExpression = (primaryIndexModificationOp
- .getOperation() == Kind.UPSERT) ? null
+ Mutable<ILogicalExpression> filterExpression =
+ (primaryIndexModificationOp.getOperation() == Kind.UPSERT) ? null
: createFilterExpression(secondaryKeyVars, context.getOutputTypeEnvironment(currentTop),
index.isOverridingKeyFieldTypes());
DataSourceIndex dataSourceIndex = new DataSourceIndex(index, dataverseName, datasetName, mp);
@@ -371,8 +371,8 @@
Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0),
secondaryKeyFields.get(0), recType);
IAType spatialType = keyPairType.first;
- boolean isPointMBR = spatialType.getTypeTag() == ATypeTag.POINT
- || spatialType.getTypeTag() == ATypeTag.POINT3D;
+ boolean isPointMBR =
+ spatialType.getTypeTag() == ATypeTag.POINT || spatialType.getTypeTag() == ATypeTag.POINT3D;
int dimension = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
int numKeys = (isPointMBR && isBulkload) ? dimension : dimension * 2;
// Get variables and expressions
@@ -381,8 +381,8 @@
for (int i = 0; i < numKeys; i++) {
LogicalVariable keyVar = context.newVar();
keyVarList.add(keyVar);
- AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_MBR));
+ AbstractFunctionCallExpression createMBR =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_MBR));
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(
new VariableReferenceExpression(secondaryKeyVars.get(0))));
createMBR.getArguments().add(new MutableObject<ILogicalExpression>(
@@ -574,8 +574,8 @@
// make handling of records with incorrect value type for this field easier and cleaner
context.addNotToBeInlinedVar(fieldVar);
// create field access
- AbstractFunctionCallExpression fieldAccessFunc = getOpenOrNestedFieldAccessFunction(varRef,
- indexFieldId.fieldName);
+ AbstractFunctionCallExpression fieldAccessFunc =
+ getOpenOrNestedFieldAccessFunction(varRef, indexFieldId.fieldName);
// create cast
theFieldAccessFunc = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(
index.isEnforced() ? BuiltinFunctions.CAST_TYPE : BuiltinFunctions.CAST_TYPE_LAX));
@@ -587,9 +587,9 @@
int pos = indexFieldId.fieldName.size() > 1 ? -1
: sourceType.getFieldIndex(indexFieldId.fieldName.get(0));
// Field not found --> This is either an open field or a nested field. it can't be accessed by index
- theFieldAccessFunc = (pos == -1)
- ? getOpenOrNestedFieldAccessFunction(varRef, indexFieldId.fieldName)
- : getClosedFieldAccessFunction(varRef, pos);
+ theFieldAccessFunc =
+ (pos == -1) ? getOpenOrNestedFieldAccessFunction(varRef, indexFieldId.fieldName)
+ : getClosedFieldAccessFunction(varRef, pos);
}
vars.add(fieldVar);
exprs.add(new MutableObject<ILogicalExpression>(theFieldAccessFunc));
@@ -619,10 +619,10 @@
private static AbstractFunctionCallExpression getClosedFieldAccessFunction(Mutable<ILogicalExpression> varRef,
int position) {
- Mutable<ILogicalExpression> indexRef = new MutableObject<>(
- new ConstantExpression(new AsterixConstantValue(new AInt32(position))));
- return new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX), varRef, indexRef);
+ Mutable<ILogicalExpression> indexRef =
+ new MutableObject<>(new ConstantExpression(new AsterixConstantValue(new AInt32(position))));
+ return new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_INDEX),
+ varRef, indexRef);
}
private static AbstractFunctionCallExpression getOpenOrNestedFieldAccessFunction(Mutable<ILogicalExpression> varRef,
@@ -632,14 +632,14 @@
IAObject fieldList = stringListToAOrderedList(fields);
Mutable<ILogicalExpression> fieldRef = constantToMutableLogicalExpression(fieldList);
// Create an expression for the nested case
- func = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_NESTED), varRef, fieldRef);
+ func = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_NESTED),
+ varRef, fieldRef);
} else {
IAObject fieldList = new AString(fields.get(0));
Mutable<ILogicalExpression> fieldRef = constantToMutableLogicalExpression(fieldList);
// Create an expression for the open field case (By name)
- func = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_NAME), varRef, fieldRef);
+ func = new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.FIELD_ACCESS_BY_NAME),
+ varRef, fieldRef);
}
return func;
}
@@ -666,12 +666,12 @@
if (!NonTaggedFormatUtil.isOptional(secondaryKeyType) && !forceFilter) {
continue;
}
- ScalarFunctionCallExpression isUnknownFuncExpr = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.IS_UNKNOWN),
- new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVar)));
- ScalarFunctionCallExpression notFuncExpr = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.NOT),
- new MutableObject<ILogicalExpression>(isUnknownFuncExpr));
+ ScalarFunctionCallExpression isUnknownFuncExpr =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.IS_UNKNOWN),
+ new MutableObject<ILogicalExpression>(new VariableReferenceExpression(secondaryKeyVar)));
+ ScalarFunctionCallExpression notFuncExpr =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.NOT),
+ new MutableObject<ILogicalExpression>(isUnknownFuncExpr));
filterExpressions.add(new MutableObject<ILogicalExpression>(notFuncExpr));
}
// No nullable secondary keys.
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java
index 0c23ace..c5cb6ae 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/IntroduceTransactionCommitByAssignOpRule.java
@@ -36,7 +36,8 @@
public class IntroduceTransactionCommitByAssignOpRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -61,8 +62,8 @@
//create an assignOp with a variable and the condition of the select-operator.
LogicalVariable v = context.newVar();
- AssignOperator assignOperator = new AssignOperator(v, new MutableObject<ILogicalExpression>(selectOperator
- .getCondition().getValue()));
+ AssignOperator assignOperator =
+ new AssignOperator(v, new MutableObject<ILogicalExpression>(selectOperator.getCondition().getValue()));
//set the input of the new assign-operator to the input of the select-operator.
assignOperator.getInputs().add(childOfSelect);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ListifyUnnestingFunctionRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ListifyUnnestingFunctionRule.java
index 8e35ffb..2fe6460 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ListifyUnnestingFunctionRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/ListifyUnnestingFunctionRule.java
@@ -118,14 +118,13 @@
// Listify the dataset into one collection.
LogicalVariable aggVar = context.newVar();
Mutable<ILogicalExpression> aggArgExprRef = new MutableObject<>(new VariableReferenceExpression(unnestVar));
- ILogicalExpression aggExpr = new AggregateFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.LISTIFY), false, new ArrayList<>(
- Collections.singletonList(aggArgExprRef)));
+ ILogicalExpression aggExpr =
+ new AggregateFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.LISTIFY), false,
+ new ArrayList<>(Collections.singletonList(aggArgExprRef)));
AggregateOperator aggregateOperator = new AggregateOperator(new ArrayList<>(Collections.singletonList(aggVar)),
new ArrayList<>(Collections.singletonList(new MutableObject<>(aggExpr))));
aggregateOperator.getInputs().add(new MutableObject<>(unnestOperator));
-
// Adds the aggregate operator as the root of the subplan.
subplanOperator.setRootOp(new MutableObject<>(aggregateOperator));
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/LoadRecordFieldsRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/LoadRecordFieldsRule.java
index 7336181..d6581a2 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/LoadRecordFieldsRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/LoadRecordFieldsRule.java
@@ -310,16 +310,16 @@
AssignOperator op2 = (AssignOperator) opChild;
int i = op2.getVariables().indexOf(recordVar);
if (i >= 0) {
- AbstractLogicalExpression constr = (AbstractLogicalExpression) op2.getExpressions().get(i)
- .getValue();
+ AbstractLogicalExpression constr =
+ (AbstractLogicalExpression) op2.getExpressions().get(i).getValue();
return resolveFieldExpression(constr, accessKey, typeEnvironment, resolver);
}
} else if (opChild.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
NestedTupleSourceOperator nts = (NestedTupleSourceOperator) opChild;
- AbstractLogicalOperator opBelowNestedPlan = (AbstractLogicalOperator) nts.getDataSourceReference()
- .getValue().getInputs().get(0).getValue();
- ILogicalExpression expr1 = findFieldExpression(opBelowNestedPlan, recordVar, accessKey, typeEnvironment,
- resolver);
+ AbstractLogicalOperator opBelowNestedPlan =
+ (AbstractLogicalOperator) nts.getDataSourceReference().getValue().getInputs().get(0).getValue();
+ ILogicalExpression expr1 =
+ findFieldExpression(opBelowNestedPlan, recordVar, accessKey, typeEnvironment, resolver);
if (expr1 != null) {
return expr1;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/NestGroupByRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/NestGroupByRule.java
index 84567d7..78399a8 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/NestGroupByRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/NestGroupByRule.java
@@ -47,7 +47,8 @@
public class NestGroupByRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java
index abb87757..cf5088b 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggFuncIntoStandaloneAggregateRule.java
@@ -54,7 +54,8 @@
public class PushAggFuncIntoStandaloneAggregateRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -98,8 +99,7 @@
continue;
}
AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
- FunctionIdentifier funcIdent = BuiltinFunctions.getAggregateFunction(funcExpr
- .getFunctionIdentifier());
+ FunctionIdentifier funcIdent = BuiltinFunctions.getAggregateFunction(funcExpr.getFunctionIdentifier());
if (funcIdent == null) {
// Recursively look in func args.
if (containsAggregate(funcExpr.getArguments())) {
@@ -169,8 +169,8 @@
return applied;
}
- private boolean pushAggregateFunction(AggregateOperator aggOp, AssignOperator assignOp, IOptimizationContext context)
- throws AlgebricksException {
+ private boolean pushAggregateFunction(AggregateOperator aggOp, AssignOperator assignOp,
+ IOptimizationContext context) throws AlgebricksException {
Mutable<ILogicalOperator> opRef3 = aggOp.getInputs().get(0);
AbstractLogicalOperator op3 = (AbstractLogicalOperator) opRef3.getValue();
// If there's a group by below the agg, then we want to have the agg pushed into the group by
@@ -204,23 +204,23 @@
return false;
}
- AbstractFunctionCallExpression aggOpExpr = (AbstractFunctionCallExpression) aggOp.getExpressions().get(0)
- .getValue();
+ AbstractFunctionCallExpression aggOpExpr =
+ (AbstractFunctionCallExpression) aggOp.getExpressions().get(0).getValue();
aggOp.getExpressions().clear();
aggOp.getVariables().clear();
for (Mutable<ILogicalExpression> srcAssignExprRef : srcAssignExprRefs) {
- AbstractFunctionCallExpression assignFuncExpr = (AbstractFunctionCallExpression) srcAssignExprRef
- .getValue();
- FunctionIdentifier aggFuncIdent = BuiltinFunctions.getAggregateFunction(assignFuncExpr
- .getFunctionIdentifier());
+ AbstractFunctionCallExpression assignFuncExpr =
+ (AbstractFunctionCallExpression) srcAssignExprRef.getValue();
+ FunctionIdentifier aggFuncIdent =
+ BuiltinFunctions.getAggregateFunction(assignFuncExpr.getFunctionIdentifier());
// Push the agg func into the agg op.
List<Mutable<ILogicalExpression>> aggArgs = new ArrayList<Mutable<ILogicalExpression>>();
aggArgs.add(aggOpExpr.getArguments().get(0));
- AggregateFunctionCallExpression aggFuncExpr = BuiltinFunctions.makeAggregateFunctionExpression(
- aggFuncIdent, aggArgs);
+ AggregateFunctionCallExpression aggFuncExpr =
+ BuiltinFunctions.makeAggregateFunctionExpression(aggFuncIdent, aggArgs);
LogicalVariable newVar = context.newVar();
aggOp.getVariables().add(newVar);
aggOp.getExpressions().add(new MutableObject<ILogicalExpression>(aggFuncExpr));
@@ -249,8 +249,7 @@
continue;
}
AbstractFunctionCallExpression funcExpr = (AbstractFunctionCallExpression) expr;
- FunctionIdentifier funcIdent = BuiltinFunctions.getAggregateFunction(funcExpr
- .getFunctionIdentifier());
+ FunctionIdentifier funcIdent = BuiltinFunctions.getAggregateFunction(funcExpr.getFunctionIdentifier());
if (funcIdent == null) {
// Recursively look in func args.
if (fingAggFuncExprRef(funcExpr.getArguments(), aggVar, srcAssignExprRefs) == false) {
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggregateIntoNestedSubplanRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggregateIntoNestedSubplanRule.java
index d69dd3b..ccf7ccb 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggregateIntoNestedSubplanRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushAggregateIntoNestedSubplanRule.java
@@ -64,8 +64,8 @@
Map<LogicalVariable, AbstractOperatorWithNestedPlans> nspWithAgg = new HashMap<>();
Map<ILogicalExpression, ILogicalExpression> aggExprToVarExpr = new HashMap<>();
// first collect vars. referring to listified sequences
- boolean changed = collectVarsBottomUp(opRef, context, nspAggVars, nspWithAgg, nspAggVarToPlanIndex,
- aggExprToVarExpr);
+ boolean changed =
+ collectVarsBottomUp(opRef, context, nspAggVars, nspWithAgg, nspAggVarToPlanIndex, aggExprToVarExpr);
if (changed) {
removeRedundantListifies(nspAggVars, nspWithAgg, nspAggVarToPlanIndex);
}
@@ -147,8 +147,8 @@
if (op1.getOperatorTag() == LogicalOperatorTag.ASSIGN) {
AssignOperator assign = (AssignOperator) op1;
for (Mutable<ILogicalExpression> exprRef : assign.getExpressions()) {
- Pair<Boolean, ILogicalExpression> p = extractAggFunctionsFromExpression(exprRef, nspWithAgg,
- aggregateExprToVarExpr, context);
+ Pair<Boolean, ILogicalExpression> p =
+ extractAggFunctionsFromExpression(exprRef, nspWithAgg, aggregateExprToVarExpr, context);
if (p.first) {
change = true;
exprRef.setValue(p.second);
@@ -158,8 +158,8 @@
if (op1.getOperatorTag() == LogicalOperatorTag.SELECT) {
SelectOperator select = (SelectOperator) op1;
Mutable<ILogicalExpression> exprRef = select.getCondition();
- Pair<Boolean, ILogicalExpression> p = extractAggFunctionsFromExpression(exprRef, nspWithAgg,
- aggregateExprToVarExpr, context);
+ Pair<Boolean, ILogicalExpression> p =
+ extractAggFunctionsFromExpression(exprRef, nspWithAgg, aggregateExprToVarExpr, context);
if (p.first) {
change = true;
exprRef.setValue(p.second);
@@ -283,8 +283,8 @@
if (nspOp != null) {
if (!aggregateExprToVarExpr.containsKey(expr)) {
LogicalVariable newVar = context.newVar();
- AggregateFunctionCallExpression aggFun = BuiltinFunctions
- .makeAggregateFunctionExpression(fi, fce.getArguments());
+ AggregateFunctionCallExpression aggFun =
+ BuiltinFunctions.makeAggregateFunctionExpression(fi, fce.getArguments());
rewriteAggregateInNestedSubplan(argVar, nspOp, aggFun, newVar, context);
ILogicalExpression newVarExpr = new VariableReferenceExpression(newVar);
aggregateExprToVarExpr.put(expr, newVarExpr);
@@ -299,8 +299,8 @@
boolean change = false;
for (Mutable<ILogicalExpression> a : fce.getArguments()) {
- Pair<Boolean, ILogicalExpression> aggArg = extractAggFunctionsFromExpression(a, nspWithAgg,
- aggregateExprToVarExpr, context);
+ Pair<Boolean, ILogicalExpression> aggArg =
+ extractAggFunctionsFromExpression(a, nspWithAgg, aggregateExprToVarExpr, context);
if (aggArg.first.booleanValue()) {
a.setValue(aggArg.second);
change = true;
@@ -324,8 +324,8 @@
for (int i = 0; i < n; i++) {
LogicalVariable v = aggOp.getVariables().get(i);
if (v.equals(oldAggVar)) {
- AbstractFunctionCallExpression oldAggExpr = (AbstractFunctionCallExpression) aggOp.getExpressions()
- .get(i).getValue();
+ AbstractFunctionCallExpression oldAggExpr =
+ (AbstractFunctionCallExpression) aggOp.getExpressions().get(i).getValue();
AggregateFunctionCallExpression newAggFun = BuiltinFunctions
.makeAggregateFunctionExpression(aggFun.getFunctionIdentifier(), new ArrayList<>());
for (Mutable<ILogicalExpression> arg : oldAggExpr.getArguments()) {
@@ -468,8 +468,8 @@
int n = nspAgg.getVariables().size();
for (int i = 0; i < n; i++) {
if (nspAgg.getVariables().get(i).equals(varFromNestedAgg)) {
- AbstractFunctionCallExpression fce = (AbstractFunctionCallExpression) nspAgg.getExpressions().get(i)
- .getValue();
+ AbstractFunctionCallExpression fce =
+ (AbstractFunctionCallExpression) nspAgg.getExpressions().get(i).getValue();
if (fce.getFunctionIdentifier().equals(BuiltinFunctions.LISTIFY)) {
ILogicalExpression argExpr = fce.getArguments().get(0).getValue();
if (argExpr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushGroupByThroughProduct.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushGroupByThroughProduct.java
index 903f49e..48c4324 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushGroupByThroughProduct.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushGroupByThroughProduct.java
@@ -77,8 +77,10 @@
}
GroupByOperator gby = (GroupByOperator) op1;
- List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorToPush = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
- List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorNotToPush = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
+ List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorToPush =
+ new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
+ List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorNotToPush =
+ new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
Mutable<ILogicalOperator> opLeftRef = join.getInputs().get(0);
ILogicalOperator opLeft = opLeftRef.getValue();
@@ -110,7 +112,7 @@
private void push(Mutable<ILogicalOperator> opRefGby, Mutable<ILogicalOperator> opRefJoin, int branch,
List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorToPush,
List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorNotToPush, IOptimizationContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
GroupByOperator gby = (GroupByOperator) opRefGby.getValue();
AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) opRefJoin.getValue();
gby.getDecorList().clear();
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushProperJoinThroughProduct.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushProperJoinThroughProduct.java
index ada4a57..1734f34 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushProperJoinThroughProduct.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/PushProperJoinThroughProduct.java
@@ -42,7 +42,8 @@
private List<LogicalVariable> productLeftBranchVars = new ArrayList<LogicalVariable>();
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveLeftOuterUnnestForLeftOuterJoinRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveLeftOuterUnnestForLeftOuterJoinRule.java
index b720714..1d7a61a 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveLeftOuterUnnestForLeftOuterJoinRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveLeftOuterUnnestForLeftOuterJoinRule.java
@@ -88,8 +88,8 @@
LeftOuterJoinOperator lojOperator = (LeftOuterJoinOperator) gbyOperator.getInputs().get(0).getValue();
// Checks whether the left outer unnest and the group-by operator are qualified for rewriting.
- Triple<Boolean, ILogicalExpression, ILogicalExpression> checkGbyResult = checkUnnestAndGby(outerUnnest,
- gbyOperator);
+ Triple<Boolean, ILogicalExpression, ILogicalExpression> checkGbyResult =
+ checkUnnestAndGby(outerUnnest, gbyOperator);
// The argument for listify and not(is-missing(...)) check should be variables.
if (!isVariableReference(checkGbyResult.second) || !isVariableReference(checkGbyResult.third)) {
return false;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantListifyRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantListifyRule.java
index 334b966..7b46b39 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantListifyRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantListifyRule.java
@@ -142,8 +142,7 @@
if (expr.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
return false;
}
- if (((AbstractFunctionCallExpression) expr)
- .getFunctionIdentifier() != BuiltinFunctions.SCAN_COLLECTION) {
+ if (((AbstractFunctionCallExpression) expr).getFunctionIdentifier() != BuiltinFunctions.SCAN_COLLECTION) {
return false;
}
AbstractFunctionCallExpression functionCall = (AbstractFunctionCallExpression) expr;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantSelectRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantSelectRule.java
index fe9e49e..53eb33a 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantSelectRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/RemoveRedundantSelectRule.java
@@ -44,7 +44,8 @@
public class RemoveRedundantSelectRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
index d22ec54..464476b 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetAsterixPhysicalOperatorsRule.java
@@ -107,8 +107,7 @@
boolean serializable = true;
for (Mutable<ILogicalExpression> exprRef : aggOp.getExpressions()) {
AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) exprRef.getValue();
- if (!BuiltinFunctions
- .isAggregateFunctionSerializable(expr.getFunctionIdentifier())) {
+ if (!BuiltinFunctions.isAggregateFunctionSerializable(expr.getFunctionIdentifier())) {
serializable = false;
break;
}
@@ -121,17 +120,17 @@
// if serializable, use external group-by
// now check whether the serialized version aggregation function has corresponding intermediate agg
boolean hasIntermediateAgg = true;
- IMergeAggregationExpressionFactory mergeAggregationExpressionFactory = context
- .getMergeAggregationExpressionFactory();
+ IMergeAggregationExpressionFactory mergeAggregationExpressionFactory =
+ context.getMergeAggregationExpressionFactory();
List<LogicalVariable> originalVariables = aggOp.getVariables();
List<Mutable<ILogicalExpression>> aggExprs = aggOp.getExpressions();
int aggNum = aggExprs.size();
for (int i = 0; i < aggNum; i++) {
- AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) aggExprs
- .get(i).getValue();
- AggregateFunctionCallExpression serialAggExpr = BuiltinFunctions
- .makeSerializableAggregateFunctionExpression(expr.getFunctionIdentifier(),
- expr.getArguments());
+ AbstractFunctionCallExpression expr =
+ (AbstractFunctionCallExpression) aggExprs.get(i).getValue();
+ AggregateFunctionCallExpression serialAggExpr =
+ BuiltinFunctions.makeSerializableAggregateFunctionExpression(
+ expr.getFunctionIdentifier(), expr.getArguments());
if (mergeAggregationExpressionFactory.createMergeAggregation(
originalVariables.get(i), serialAggExpr, context) == null) {
hasIntermediateAgg = false;
@@ -153,16 +152,15 @@
if (hasIntermediateAgg && !multipleAggOpsFound) {
for (int i = 0; i < aggNum; i++) {
- AbstractFunctionCallExpression expr = (AbstractFunctionCallExpression) aggExprs
- .get(i).getValue();
- AggregateFunctionCallExpression serialAggExpr = BuiltinFunctions
- .makeSerializableAggregateFunctionExpression(
+ AbstractFunctionCallExpression expr =
+ (AbstractFunctionCallExpression) aggExprs.get(i).getValue();
+ AggregateFunctionCallExpression serialAggExpr =
+ BuiltinFunctions.makeSerializableAggregateFunctionExpression(
expr.getFunctionIdentifier(), expr.getArguments());
aggOp.getExpressions().get(i).setValue(serialAggExpr);
}
ExternalGroupByPOperator externalGby = new ExternalGroupByPOperator(
- gby.getGroupByList(),
- physicalOptimizationConfig.getMaxFramesForGroupBy(),
+ gby.getGroupByList(), physicalOptimizationConfig.getMaxFramesForGroupBy(),
(long) physicalOptimizationConfig.getMaxFramesForGroupBy()
* physicalOptimizationConfig.getFrameSize());
generateMergeAggregationExpressions(gby, context);
@@ -229,12 +227,12 @@
AccessMethodJobGenParams jobGenParams = new AccessMethodJobGenParams();
jobGenParams.readFromFuncArgs(f.getArguments());
MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
- DataSourceId dataSourceId = new DataSourceId(jobGenParams.getDataverseName(),
- jobGenParams.getDatasetName());
- Dataset dataset = mp.findDataset(jobGenParams.getDataverseName(),
- jobGenParams.getDatasetName());
- IDataSourceIndex<String, DataSourceId> dsi = mp.findDataSourceIndex(jobGenParams.getIndexName(),
- dataSourceId);
+ DataSourceId dataSourceId =
+ new DataSourceId(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
+ Dataset dataset =
+ mp.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName());
+ IDataSourceIndex<String, DataSourceId> dsi =
+ mp.findDataSourceIndex(jobGenParams.getIndexName(), dataSourceId);
INodeDomain storageDomain = mp.findNodeDomain(dataset.getNodeGroupName());
if (dsi == null) {
throw new AlgebricksException("Could not find index " + jobGenParams.getIndexName()
@@ -300,8 +298,8 @@
"External group-by currently works only for one nested plan with one root containing"
+ "an aggregate and a nested-tuple-source.");
}
- IMergeAggregationExpressionFactory mergeAggregationExpressionFactory = context
- .getMergeAggregationExpressionFactory();
+ IMergeAggregationExpressionFactory mergeAggregationExpressionFactory =
+ context.getMergeAggregationExpressionFactory();
Mutable<ILogicalOperator> r0 = p0.getRoots().get(0);
AbstractLogicalOperator r0Logical = (AbstractLogicalOperator) r0.getValue();
if (r0Logical.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
index d1e7d5c..48a744d 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetClosedRecordConstructorsRule.java
@@ -130,8 +130,7 @@
}
}
if (allClosed) {
- expr.setFunctionInfo(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR));
+ expr.setFunctionInfo(FunctionUtil.getFunctionInfo(BuiltinFunctions.CLOSED_RECORD_CONSTRUCTOR));
GlobalConfig.ASTERIX_LOGGER.trace("Switching to CLOSED record constructor in " + expr + ".\n");
changed = true;
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetupCommitExtensionOpRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetupCommitExtensionOpRule.java
index 7dfe161..90491d3 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetupCommitExtensionOpRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SetupCommitExtensionOpRule.java
@@ -99,8 +99,7 @@
//create the logical and physical operator
CommitOperator commitOperator = new CommitOperator(primaryKeyLogicalVars, isSink);
- CommitPOperator commitPOperator =
- new CommitPOperator(dataset, primaryKeyLogicalVars, isSink);
+ CommitPOperator commitPOperator = new CommitPOperator(dataset, primaryKeyLogicalVars, isSink);
commitOperator.setPhysicalOperator(commitPOperator);
//create ExtensionOperator and put the commitOperator in it.
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SimilarityCheckRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SimilarityCheckRule.java
index 1e445e5..0c3de91 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SimilarityCheckRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/SimilarityCheckRule.java
@@ -183,8 +183,8 @@
if (simCheckFuncExpr != null) {
// Create a new assign under matchingAssign which assigns the result of our similarity-check function to a variable.
LogicalVariable newVar = context.newVar();
- AssignOperator newAssign = new AssignOperator(newVar,
- new MutableObject<ILogicalExpression>(simCheckFuncExpr));
+ AssignOperator newAssign =
+ new AssignOperator(newVar, new MutableObject<ILogicalExpression>(simCheckFuncExpr));
// Hook up inputs.
newAssign.getInputs()
.add(new MutableObject<ILogicalOperator>(matchingAssign.getInputs().get(0).getValue()));
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeAccessMethod.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeAccessMethod.java
index 29d00d0..1f4676c 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeAccessMethod.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/BTreeAccessMethod.java
@@ -87,12 +87,8 @@
}
private static final List<FunctionIdentifier> FUNC_IDENTIFIERS =
- Collections.unmodifiableList(Arrays.asList(
- AlgebricksBuiltinFunctions.EQ,
- AlgebricksBuiltinFunctions.LE,
- AlgebricksBuiltinFunctions.GE,
- AlgebricksBuiltinFunctions.LT,
- AlgebricksBuiltinFunctions.GT));
+ Collections.unmodifiableList(Arrays.asList(AlgebricksBuiltinFunctions.EQ, AlgebricksBuiltinFunctions.LE,
+ AlgebricksBuiltinFunctions.GE, AlgebricksBuiltinFunctions.LT, AlgebricksBuiltinFunctions.GT));
public static final BTreeAccessMethod INSTANCE = new BTreeAccessMethod();
@@ -105,9 +101,8 @@
public boolean analyzeFuncExprArgsAndUpdateAnalysisCtx(AbstractFunctionCallExpression funcExpr,
List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx,
IOptimizationContext context, IVariableTypeEnvironment typeEnvironment) throws AlgebricksException {
- boolean matches =
- AccessMethodUtils.analyzeFuncExprArgsForOneConstAndVarAndUpdateAnalysisCtx(
- funcExpr, analysisCtx, context, typeEnvironment);
+ boolean matches = AccessMethodUtils.analyzeFuncExprArgsForOneConstAndVarAndUpdateAnalysisCtx(funcExpr,
+ analysisCtx, context, typeEnvironment);
if (!matches) {
matches = AccessMethodUtils.analyzeFuncExprArgsForTwoVarsAndUpdateAnalysisCtx(funcExpr, analysisCtx);
}
@@ -131,13 +126,13 @@
SelectOperator select = (SelectOperator) selectRef.getValue();
Mutable<ILogicalExpression> conditionRef = select.getCondition();
- ILogicalOperator primaryIndexUnnestOp = createSecondaryToPrimaryPlan(conditionRef, subTree, null, chosenIndex,
- analysisCtx,
- AccessMethodUtils.retainInputs(subTree.getDataSourceVariables(), subTree.getDataSourceRef().getValue(),
- afterSelectRefs),
- false, subTree.getDataSourceRef().getValue().getInputs().get(0).getValue()
- .getExecutionMode() == ExecutionMode.UNPARTITIONED,
- context);
+ ILogicalOperator primaryIndexUnnestOp =
+ createSecondaryToPrimaryPlan(conditionRef, subTree, null, chosenIndex, analysisCtx,
+ AccessMethodUtils.retainInputs(subTree.getDataSourceVariables(),
+ subTree.getDataSourceRef().getValue(), afterSelectRefs),
+ false, subTree.getDataSourceRef().getValue().getInputs().get(0).getValue()
+ .getExecutionMode() == ExecutionMode.UNPARTITIONED,
+ context);
if (primaryIndexUnnestOp == null) {
return false;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java
index 5c1a95e..1171ae5 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceJoinAccessMethodRule.java
@@ -361,8 +361,8 @@
// in GroupByOp.
if (isThisOpLeftOuterJoin && isParentOpGroupBy) {
analysisCtx.setLOJGroupbyOpRef(opRef);
- ScalarFunctionCallExpression isNullFuncExpr = AccessMethodUtils
- .findLOJIsMissingFuncInGroupBy((GroupByOperator) opRef.getValue());
+ ScalarFunctionCallExpression isNullFuncExpr =
+ AccessMethodUtils.findLOJIsMissingFuncInGroupBy((GroupByOperator) opRef.getValue());
analysisCtx.setLOJIsNullFuncInGroupBy(isNullFuncExpr);
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroducePrimaryIndexForAggregationRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroducePrimaryIndexForAggregationRule.java
index eaea208..7b8b906 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroducePrimaryIndexForAggregationRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroducePrimaryIndexForAggregationRule.java
@@ -123,21 +123,20 @@
}
context.addToDontApplySet(this, opRef.getValue());
// find the data scan or unnest map
- Pair<Mutable<ILogicalOperator>,Mutable<ILogicalOperator>> scanAndAssignOpRef =
- findScanAndAssignOperator(localAggregateOperator,context.getMetadataProvider());
+ Pair<Mutable<ILogicalOperator>, Mutable<ILogicalOperator>> scanAndAssignOpRef =
+ findScanAndAssignOperator(localAggregateOperator, context.getMetadataProvider());
if (scanAndAssignOpRef == null) {
return false;
}
// find its primary index and replace datascan
- boolean transformed =
- replaceDatascan(localAggregateOperator,scanAndAssignOpRef, context);
+ boolean transformed = replaceDatascan(localAggregateOperator, scanAndAssignOpRef, context);
if (transformed) {
OperatorPropertiesUtil.typeOpRec(opRef, context);
}
return transformed;
}
- private Pair<Mutable<ILogicalOperator>,Mutable<ILogicalOperator>> findScanAndAssignOperator(
+ private Pair<Mutable<ILogicalOperator>, Mutable<ILogicalOperator>> findScanAndAssignOperator(
ILogicalOperator localAggregateOperator, IMetadataProvider metadataProvider) throws AlgebricksException {
Mutable<ILogicalOperator> scanOpRef = localAggregateOperator.getInputs().get(0);
Mutable<ILogicalOperator> assignOpRef = null;
@@ -148,8 +147,8 @@
scanOpRef = scanOpRef.getValue().getInputs().get(0);
}
// next operator must be datascan or unnest map using the dataset
- if (scanOpRef.getValue().getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN &&
- scanOpRef.getValue().getOperatorTag() != LogicalOperatorTag.UNNEST_MAP) {
+ if (scanOpRef.getValue().getOperatorTag() != LogicalOperatorTag.DATASOURCESCAN
+ && scanOpRef.getValue().getOperatorTag() != LogicalOperatorTag.UNNEST_MAP) {
return null;
}
if (scanOpRef.getValue().getOperatorTag() == LogicalOperatorTag.UNNEST_MAP) {
@@ -159,31 +158,31 @@
if (logicalExpression.getExpressionTag() != LogicalExpressionTag.FUNCTION_CALL) {
return null;
}
- AbstractFunctionCallExpression functionCallExpression = (AbstractFunctionCallExpression)logicalExpression;
+ AbstractFunctionCallExpression functionCallExpression = (AbstractFunctionCallExpression) logicalExpression;
if (functionCallExpression.getFunctionIdentifier() != BuiltinFunctions.INDEX_SEARCH) {
return null;
}
- String indexName = ConstantExpressionUtil.getStringArgument(functionCallExpression,0);
- String dataverseName = ConstantExpressionUtil.getStringArgument(functionCallExpression,2);
- String datasetName = ConstantExpressionUtil.getStringArgument(functionCallExpression,3);
- Index index = ((MetadataProvider)metadataProvider).getIndex(dataverseName, datasetName, indexName);
+ String indexName = ConstantExpressionUtil.getStringArgument(functionCallExpression, 0);
+ String dataverseName = ConstantExpressionUtil.getStringArgument(functionCallExpression, 2);
+ String datasetName = ConstantExpressionUtil.getStringArgument(functionCallExpression, 3);
+ Index index = ((MetadataProvider) metadataProvider).getIndex(dataverseName, datasetName, indexName);
if (!index.isPrimaryIndex()) {
return null;
}
}
- return Pair.of(scanOpRef,assignOpRef);
+ return Pair.of(scanOpRef, assignOpRef);
}
private boolean replaceDatascan(AggregateOperator localAggregateOperator,
- Pair<Mutable<ILogicalOperator>,Mutable<ILogicalOperator>> scanAndAssignOpRef, IOptimizationContext context)
+ Pair<Mutable<ILogicalOperator>, Mutable<ILogicalOperator>> scanAndAssignOpRef, IOptimizationContext context)
throws AlgebricksException {
/* find the primary index */
Mutable<ILogicalOperator> scanOperatorRef = scanAndAssignOpRef.getLeft();
Mutable<ILogicalOperator> assignOperatorRef = scanAndAssignOpRef.getRight();
AbstractScanOperator scanOperator = (AbstractScanOperator) scanOperatorRef.getValue();
BTreeJobGenParams originalBTreeParameters = new BTreeJobGenParams();
- Pair<Dataset,Index> datasetAndIndex = findDatasetAndSecondaryPrimaryIndex(scanOperator,originalBTreeParameters,
- context);
+ Pair<Dataset, Index> datasetAndIndex =
+ findDatasetAndSecondaryPrimaryIndex(scanOperator, originalBTreeParameters, context);
if (datasetAndIndex == null) {
return false;
}
@@ -194,8 +193,8 @@
/////// check usage of variables produced by scan operator in parents ///////
Set<LogicalVariable> variablesProducedByScanOp = getVariablesProducedByScanOp(scanOperator,
dataset.getPrimaryKeys().size(), scanOperator.getVariables().size());
- boolean variablesAreUsed = scanOperatorVariablesAreUsed(localAggregateOperator, assignOperatorRef,
- variablesProducedByScanOp);
+ boolean variablesAreUsed =
+ scanOperatorVariablesAreUsed(localAggregateOperator, assignOperatorRef, variablesProducedByScanOp);
if (variablesAreUsed) {
return false;
}
@@ -206,8 +205,8 @@
retainInput = AccessMethodUtils.retainInputs(scanOperator.getVariables(), scanOperator, parents);
newBTreeParameters = new BTreeJobGenParams(primaryIndex.getIndexName(), DatasetConfig.IndexType.BTREE,
dataset.getDataverseName(), dataset.getDatasetName(), retainInput,
- scanOperator.getInputs().get(0).getValue().getExecutionMode() ==
- AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
+ scanOperator.getInputs().get(0).getValue()
+ .getExecutionMode() == AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
List<LogicalVariable> empty = new ArrayList<>();
newBTreeParameters.setLowKeyInclusive(true);
newBTreeParameters.setHighKeyInclusive(true);
@@ -227,9 +226,9 @@
newBTreeParameters.setHighKeyVarList(originalBTreeParameters.getHighKeyVarList(), 0,
originalBTreeParameters.getHighKeyVarList().size());
}
- ARecordType recordType = (ARecordType) ((MetadataProvider)context.getMetadataProvider()).findType(dataset);
+ ARecordType recordType = (ARecordType) ((MetadataProvider) context.getMetadataProvider()).findType(dataset);
ARecordType metaRecordType =
- (ARecordType) ((MetadataProvider)context.getMetadataProvider()).findMetaType(dataset);
+ (ARecordType) ((MetadataProvider) context.getMetadataProvider()).findMetaType(dataset);
// create the operator that will replace the dataset scan/search
AbstractUnnestMapOperator primaryIndexUnnestOperator =
(AbstractUnnestMapOperator) AccessMethodUtils.createSecondaryIndexUnnestMap(dataset, recordType,
@@ -256,7 +255,7 @@
* @return The dataset and its primary index
* @throws AlgebricksException when there is a problem getting the dataset or its indexes from the metadata
*/
- private Pair<Dataset,Index> findDatasetAndSecondaryPrimaryIndex(AbstractScanOperator scanOperator,
+ private Pair<Dataset, Index> findDatasetAndSecondaryPrimaryIndex(AbstractScanOperator scanOperator,
BTreeJobGenParams originalBTreeParameters, IOptimizationContext context) throws AlgebricksException {
// #1. get the dataset
Dataset dataset;
@@ -271,20 +270,20 @@
} else {
// case 2: dataset range search
AbstractFunctionCallExpression primaryIndexFunctionCall =
- (AbstractFunctionCallExpression) ((UnnestMapOperator)scanOperator).getExpressionRef().getValue();
+ (AbstractFunctionCallExpression) ((UnnestMapOperator) scanOperator).getExpressionRef().getValue();
originalBTreeParameters.readFromFuncArgs(primaryIndexFunctionCall.getArguments());
if (originalBTreeParameters.isEqCondition()) {
return null;
}
- dataset = ((MetadataProvider)context.getMetadataProvider()).findDataset(
- originalBTreeParameters.getDataverseName(), originalBTreeParameters.getDatasetName());
+ dataset = ((MetadataProvider) context.getMetadataProvider())
+ .findDataset(originalBTreeParameters.getDataverseName(), originalBTreeParameters.getDatasetName());
}
// #2. get all indexes and look for the primary one
- List<Index> indexes = ((MetadataProvider)context.getMetadataProvider()).getDatasetIndexes(
- dataset.getDataverseName(), dataset.getDatasetName());
+ List<Index> indexes = ((MetadataProvider) context.getMetadataProvider())
+ .getDatasetIndexes(dataset.getDataverseName(), dataset.getDatasetName());
for (Index index : indexes) {
if (index.getKeyFieldNames().isEmpty()) {
- return Pair.of(dataset,index);
+ return Pair.of(dataset, index);
}
}
return null;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java
index d95b278..d0e973f 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/IntroduceSelectAccessMethodRule.java
@@ -177,8 +177,8 @@
Map<IAccessMethod, AccessMethodAnalysisContext> analyzedAMs, IOptimizationContext context)
throws AlgebricksException {
Pair<IAccessMethod, Index> chosenIndex = null;
- Optional<Pair<IAccessMethod, Index>> primaryIndex = chosenIndexes.stream()
- .filter(pair -> pair.second.isPrimaryIndex()).findFirst();
+ Optional<Pair<IAccessMethod, Index>> primaryIndex =
+ chosenIndexes.stream().filter(pair -> pair.second.isPrimaryIndex()).findFirst();
if (chosenIndexes.size() == 1) {
chosenIndex = chosenIndexes.get(0);
} else if (primaryIndex.isPresent()) {
@@ -235,8 +235,8 @@
throw new AlgebricksException(
"The order by expression should be variables, but they aren't variables.");
}
- VariableReferenceExpression orderedVar = (VariableReferenceExpression) orderExpression.second
- .getValue();
+ VariableReferenceExpression orderedVar =
+ (VariableReferenceExpression) orderExpression.second.getValue();
orderedColumn.add(orderedVar.getVariableReference());
}
inputVars.add(orderedColumn);
@@ -373,7 +373,6 @@
}
-
@Override
public Map<FunctionIdentifier, List<IAccessMethod>> getAccessMethods() {
return accessMethods;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
index d1506f7..1c7330a 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/InvertedIndexAccessMethod.java
@@ -156,7 +156,7 @@
public boolean analyzeGetItemFuncExpr(AbstractFunctionCallExpression funcExpr,
List<AbstractLogicalOperator> assignsAndUnnests, AccessMethodAnalysisContext analysisCtx)
- throws AlgebricksException {
+ throws AlgebricksException {
if (funcExpr.getFunctionIdentifier() != BuiltinFunctions.GET_ITEM) {
return false;
}
@@ -213,8 +213,8 @@
if (unnestFuncExpr.getFunctionIdentifier() != BuiltinFunctions.SCAN_COLLECTION) {
return false;
}
- matchedFuncExpr = (AbstractFunctionCallExpression) unnestFuncExpr.getArguments().get(0)
- .getValue();
+ matchedFuncExpr =
+ (AbstractFunctionCallExpression) unnestFuncExpr.getArguments().get(0).getValue();
}
}
// We've already found a match.
@@ -254,13 +254,13 @@
|| arg2.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
return false;
}
- LogicalVariable fieldVarExpr1 = getNonConstArgFieldExprPair(arg1, funcExpr, assignsAndUnnests,
- matchedAssignOrUnnestIndex);
+ LogicalVariable fieldVarExpr1 =
+ getNonConstArgFieldExprPair(arg1, funcExpr, assignsAndUnnests, matchedAssignOrUnnestIndex);
if (fieldVarExpr1 == null) {
return false;
}
- LogicalVariable fieldVarExpr2 = getNonConstArgFieldExprPair(arg2, funcExpr, assignsAndUnnests,
- matchedAssignOrUnnestIndex);
+ LogicalVariable fieldVarExpr2 =
+ getNonConstArgFieldExprPair(arg2, funcExpr, assignsAndUnnests, matchedAssignOrUnnestIndex);
if (fieldVarExpr2 == null) {
return false;
}
@@ -307,8 +307,8 @@
} else {
return false;
}
- LogicalVariable fieldVarExpr = getNonConstArgFieldExprPair(nonConstArg, funcExpr, assignsAndUnnests,
- matchedAssignOrUnnestIndex);
+ LogicalVariable fieldVarExpr =
+ getNonConstArgFieldExprPair(nonConstArg, funcExpr, assignsAndUnnests, matchedAssignOrUnnestIndex);
if (fieldVarExpr == null) {
return false;
}
@@ -388,9 +388,9 @@
// we made sure indexSubTree has datasource scan
DataSourceScanOperator dataSourceScan = (DataSourceScanOperator) indexSubTree.getDataSourceRef().getValue();
- InvertedIndexJobGenParams jobGenParams = new InvertedIndexJobGenParams(chosenIndex.getIndexName(),
- chosenIndex.getIndexType(), dataset.getDataverseName(), dataset.getDatasetName(), retainInput,
- requiresBroadcast);
+ InvertedIndexJobGenParams jobGenParams =
+ new InvertedIndexJobGenParams(chosenIndex.getIndexName(), chosenIndex.getIndexType(),
+ dataset.getDataverseName(), dataset.getDatasetName(), retainInput, requiresBroadcast);
// Add function-specific args such as search modifier, and possibly a similarity threshold.
addFunctionSpecificArgs(optFuncExpr, jobGenParams);
// Add the type of search key from the optFuncExpr.
@@ -424,9 +424,9 @@
metaRecordType, chosenIndex, inputOp, jobGenParams, context, true, retainInput, retainNull);
// Generate the rest of the upstream plan which feeds the search results into the primary index.
- AbstractUnnestMapOperator primaryIndexUnnestOp = AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceScan,
- dataset, recordType, metaRecordType, secondaryIndexUnnestOp, context, true, retainInput, retainNull,
- false);
+ AbstractUnnestMapOperator primaryIndexUnnestOp =
+ AccessMethodUtils.createPrimaryIndexUnnestMap(dataSourceScan, dataset, recordType, metaRecordType,
+ secondaryIndexUnnestOp, context, true, retainInput, retainNull, false);
return primaryIndexUnnestOp;
}
@@ -526,8 +526,7 @@
Mutable<ILogicalOperator> panicJoinRef = null;
Map<LogicalVariable, LogicalVariable> panicVarMap = null;
if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == BuiltinFunctions.EDIT_DISTANCE_CHECK
- || optFuncExpr.getFuncExpr()
- .getFunctionIdentifier() == BuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
+ || optFuncExpr.getFuncExpr().getFunctionIdentifier() == BuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
panicJoinRef = new MutableObject<>(joinRef.getValue());
panicVarMap = new HashMap<>();
Mutable<ILogicalOperator> newProbeRootRef = createPanicNestedLoopJoinPlan(panicJoinRef, indexSubTree,
@@ -577,8 +576,8 @@
// Place a top-level equi-join on top to retrieve the missing variables from the original probe subtree.
// The inner (build) branch of the join is the subtree with the data scan, since the result of the similarity join could potentially be big.
// This choice may not always be the most efficient, but it seems more robust than the alternative.
- Mutable<ILogicalExpression> eqJoinConditionRef = createPrimaryKeysEqJoinCondition(originalSubTreePKs,
- surrogateSubTreePKs);
+ Mutable<ILogicalExpression> eqJoinConditionRef =
+ createPrimaryKeysEqJoinCondition(originalSubTreePKs, surrogateSubTreePKs);
InnerJoinOperator topEqJoin = new InnerJoinOperator(eqJoinConditionRef, originalProbeSubTreeRootRef,
new MutableObject<ILogicalOperator>(topOp));
topEqJoin.setExecutionMode(ExecutionMode.PARTITIONED);
@@ -628,14 +627,14 @@
}
// Create first copy.
- LogicalOperatorDeepCopyWithNewVariablesVisitor firstDeepCopyVisitor = new LogicalOperatorDeepCopyWithNewVariablesVisitor(
- context, context, newProbeSubTreeVarMap, true);
+ LogicalOperatorDeepCopyWithNewVariablesVisitor firstDeepCopyVisitor =
+ new LogicalOperatorDeepCopyWithNewVariablesVisitor(context, context, newProbeSubTreeVarMap, true);
ILogicalOperator newProbeSubTree = firstDeepCopyVisitor.deepCopy(probeSubTree.getRoot());
inferTypes(newProbeSubTree, context);
Mutable<ILogicalOperator> newProbeSubTreeRootRef = new MutableObject<ILogicalOperator>(newProbeSubTree);
// Create second copy.
- LogicalOperatorDeepCopyWithNewVariablesVisitor secondDeepCopyVisitor = new LogicalOperatorDeepCopyWithNewVariablesVisitor(
- context, context, joinInputSubTreeVarMap, true);
+ LogicalOperatorDeepCopyWithNewVariablesVisitor secondDeepCopyVisitor =
+ new LogicalOperatorDeepCopyWithNewVariablesVisitor(context, context, joinInputSubTreeVarMap, true);
ILogicalOperator joinInputSubTree = secondDeepCopyVisitor.deepCopy(probeSubTree.getRoot());
inferTypes(joinInputSubTree, context);
probeSubTree.getRootRef().setValue(joinInputSubTree);
@@ -670,8 +669,8 @@
args.add(
new MutableObject<ILogicalExpression>(new VariableReferenceExpression(surrogateSubTreePKs.get(i))));
args.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(originalSubTreePKs.get(i))));
- ILogicalExpression eqFunc = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(AlgebricksBuiltinFunctions.EQ), args);
+ ILogicalExpression eqFunc =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(AlgebricksBuiltinFunctions.EQ), args);
eqExprs.add(new MutableObject<ILogicalExpression>(eqFunc));
}
if (eqExprs.size() == 1) {
@@ -712,8 +711,8 @@
VariableUtilities.getLiveVariables(indexSubTree.getRoot(), originalLiveVars);
// Copy the scan subtree in indexSubTree.
- LogicalOperatorDeepCopyWithNewVariablesVisitor deepCopyVisitor = new LogicalOperatorDeepCopyWithNewVariablesVisitor(
- context, context);
+ LogicalOperatorDeepCopyWithNewVariablesVisitor deepCopyVisitor =
+ new LogicalOperatorDeepCopyWithNewVariablesVisitor(context, context);
ILogicalOperator scanSubTree = deepCopyVisitor.deepCopy(indexSubTree.getRoot());
Map<LogicalVariable, LogicalVariable> copyVarMap = deepCopyVisitor.getInputToOutputVariableMapping();
@@ -777,8 +776,8 @@
throw CompilationException.create(ErrorCode.NO_SUPPORTED_TYPE);
}
- SelectOperator isFilterableSelectOp = new SelectOperator(
- new MutableObject<ILogicalExpression>(isFilterableExpr), false, null);
+ SelectOperator isFilterableSelectOp =
+ new SelectOperator(new MutableObject<ILogicalExpression>(isFilterableExpr), false, null);
isFilterableSelectOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
isFilterableSelectOp.setExecutionMode(ExecutionMode.LOCAL);
context.computeAndSetTypeEnvironmentForOperator(isFilterableSelectOp);
@@ -788,8 +787,8 @@
isNotFilterableArgs.add(new MutableObject<ILogicalExpression>(isFilterableExpr));
ILogicalExpression isNotFilterableExpr = new ScalarFunctionCallExpression(
FunctionUtil.getFunctionInfo(BuiltinFunctions.NOT), isNotFilterableArgs);
- SelectOperator isNotFilterableSelectOp = new SelectOperator(
- new MutableObject<ILogicalExpression>(isNotFilterableExpr), false, null);
+ SelectOperator isNotFilterableSelectOp =
+ new SelectOperator(new MutableObject<ILogicalExpression>(isNotFilterableExpr), false, null);
isNotFilterableSelectOp.getInputs().add(new MutableObject<ILogicalOperator>(inputOp));
isNotFilterableSelectOp.setExecutionMode(ExecutionMode.LOCAL);
context.computeAndSetTypeEnvironmentForOperator(isNotFilterableSelectOp);
@@ -847,8 +846,7 @@
return;
}
if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == BuiltinFunctions.EDIT_DISTANCE_CHECK
- || optFuncExpr.getFuncExpr()
- .getFunctionIdentifier() == BuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
+ || optFuncExpr.getFuncExpr().getFunctionIdentifier() == BuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
if (optFuncExpr.containsPartialField()) {
jobGenParams.setSearchModifierType(SearchModifierType.CONJUNCTIVE_EDIT_DISTANCE);
} else {
@@ -896,7 +894,7 @@
private void addKeyVarsAndExprs(IOptimizableFuncExpr optFuncExpr, ArrayList<LogicalVariable> keyVarList,
ArrayList<Mutable<ILogicalExpression>> keyExprList, IOptimizationContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
// For now we are assuming a single secondary index key.
// Add a variable and its expr to the lists which will be passed into an assign op.
LogicalVariable keyVar = context.newVar();
@@ -913,8 +911,7 @@
}
if (optFuncExpr.getFuncExpr().getFunctionIdentifier() == BuiltinFunctions.EDIT_DISTANCE_CHECK
- || optFuncExpr.getFuncExpr()
- .getFunctionIdentifier() == BuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
+ || optFuncExpr.getFuncExpr().getFunctionIdentifier() == BuiltinFunctions.EDIT_DISTANCE_CONTAINS) {
return isEditDistanceFuncOptimizable(index, optFuncExpr);
}
@@ -970,8 +967,8 @@
// Check for panic in selection query.
// TODO: Panic also depends on prePost which is currently hardcoded to be true.
- AsterixConstantValue listOrStrConstVal = (AsterixConstantValue) ((ConstantExpression) optFuncExpr
- .getConstantExpr(0)).getValue();
+ AsterixConstantValue listOrStrConstVal =
+ (AsterixConstantValue) ((ConstantExpression) optFuncExpr.getConstantExpr(0)).getValue();
IAObject listOrStrObj = listOrStrConstVal.getObject();
ATypeTag typeTag = listOrStrObj.getType().getTypeTag();
@@ -979,8 +976,8 @@
return false;
}
- AsterixConstantValue intConstVal = (AsterixConstantValue) ((ConstantExpression) optFuncExpr.getConstantExpr(1))
- .getValue();
+ AsterixConstantValue intConstVal =
+ (AsterixConstantValue) ((ConstantExpression) optFuncExpr.getConstantExpr(1)).getValue();
IAObject intObj = intConstVal.getObject();
AInt32 edThresh = null;
@@ -1168,8 +1165,8 @@
}
private boolean isContainsFuncSelectOptimizable(Index index, IOptimizableFuncExpr optFuncExpr) {
- AsterixConstantValue strConstVal = (AsterixConstantValue) ((ConstantExpression) optFuncExpr.getConstantExpr(0))
- .getValue();
+ AsterixConstantValue strConstVal =
+ (AsterixConstantValue) ((ConstantExpression) optFuncExpr.getConstantExpr(0)).getValue();
IAObject strObj = strConstVal.getObject();
ATypeTag typeTag = strObj.getType().getTypeTag();
@@ -1265,8 +1262,8 @@
}
}
default: {
- throw CompilationException.create(ErrorCode.INCOMPATIBLE_SEARCH_MODIFIER,
- searchModifierType, index.getIndexType());
+ throw CompilationException.create(ErrorCode.INCOMPATIBLE_SEARCH_MODIFIER, searchModifierType,
+ index.getIndexType());
}
}
default:
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeAccessMethod.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeAccessMethod.java
index b66a663..fd46194 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeAccessMethod.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/am/RTreeAccessMethod.java
@@ -196,8 +196,8 @@
int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
int numSecondaryKeys = numDimensions * 2;
// we made sure indexSubTree has datasource scan
- AbstractDataSourceOperator dataSourceOp = (AbstractDataSourceOperator) indexSubTree.getDataSourceRef()
- .getValue();
+ AbstractDataSourceOperator dataSourceOp =
+ (AbstractDataSourceOperator) indexSubTree.getDataSourceRef().getValue();
RTreeJobGenParams jobGenParams = new RTreeJobGenParams(chosenIndex.getIndexName(), IndexType.RTREE,
dataset.getDataverseName(), dataset.getDatasetName(), retainInput, requiresBroadcast);
// A spatial object is serialized in the constant of the func expr we are optimizing.
@@ -214,8 +214,8 @@
for (int i = 0; i < numSecondaryKeys; i++) {
// The create MBR function "extracts" one field of an MBR around the given spatial object.
- AbstractFunctionCallExpression createMBR = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_MBR));
+ AbstractFunctionCallExpression createMBR =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.CREATE_MBR));
// Spatial object is the constant from the func expr we are optimizing.
createMBR.getArguments().add(new MutableObject<>(searchKeyExpr));
// The number of dimensions.
@@ -236,8 +236,8 @@
if (probeSubTree == null) {
// We are optimizing a selection query.
// Input to this assign is the EmptyTupleSource (which the dataSourceScan also must have had as input).
- assignSearchKeys.getInputs().add(new MutableObject<>(
- OperatorManipulationUtil.deepCopy(dataSourceOp.getInputs().get(0).getValue())));
+ assignSearchKeys.getInputs().add(
+ new MutableObject<>(OperatorManipulationUtil.deepCopy(dataSourceOp.getInputs().get(0).getValue())));
assignSearchKeys.setExecutionMode(dataSourceOp.getExecutionMode());
} else {
// We are optimizing a join, place the assign op top of the probe subtree.
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineAllNtsInSubplanVisitor.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineAllNtsInSubplanVisitor.java
index 9a49472..8bfa53a 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineAllNtsInSubplanVisitor.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineAllNtsInSubplanVisitor.java
@@ -136,8 +136,8 @@
// Maps live variables at <code>subplanInputOperator</code> to variables in
// the flattened nested plan.
- private final LinkedHashMap<LogicalVariable, LogicalVariable> subplanInputVarToCurrentVarMap = new
- LinkedHashMap<>();
+ private final LinkedHashMap<LogicalVariable, LogicalVariable> subplanInputVarToCurrentVarMap =
+ new LinkedHashMap<>();
// Maps variables in the flattened nested plan to live variables at
// <code>subplannputOperator</code>.
@@ -288,15 +288,13 @@
if (!correlatedKeyVars.contains(inputLiveVar)) {
recordConstructorArgs.add(new MutableObject<>(new ConstantExpression(
new AsterixConstantValue(new AString(Integer.toString(inputLiveVar.getId()))))));
- recordConstructorArgs
- .add(new MutableObject<>(new VariableReferenceExpression(inputLiveVar)));
+ recordConstructorArgs.add(new MutableObject<>(new VariableReferenceExpression(inputLiveVar)));
}
}
LogicalVariable recordVar = context.newVar();
- Mutable<ILogicalExpression> recordExprRef = new MutableObject<ILogicalExpression>(
- new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.OPEN_RECORD_CONSTRUCTOR),
- recordConstructorArgs));
+ Mutable<ILogicalExpression> recordExprRef =
+ new MutableObject<ILogicalExpression>(new ScalarFunctionCallExpression(
+ FunctionUtil.getFunctionInfo(BuiltinFunctions.OPEN_RECORD_CONSTRUCTOR), recordConstructorArgs));
AssignOperator assignOp = new AssignOperator(recordVar, recordExprRef);
return new Pair<>(assignOp, recordVar);
}
@@ -363,13 +361,11 @@
private Pair<ILogicalOperator, LogicalVariable> createUnnestForAggregatedList(LogicalVariable aggVar) {
LogicalVariable unnestVar = context.newVar();
// Creates an unnest function expression.
- Mutable<ILogicalExpression> unnestArg = new MutableObject<>(
- new VariableReferenceExpression(aggVar));
+ Mutable<ILogicalExpression> unnestArg = new MutableObject<>(new VariableReferenceExpression(aggVar));
List<Mutable<ILogicalExpression>> unnestArgList = new ArrayList<>();
unnestArgList.add(unnestArg);
- Mutable<ILogicalExpression> unnestExpr = new MutableObject<>(
- new UnnestingFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.SCAN_COLLECTION), unnestArgList));
+ Mutable<ILogicalExpression> unnestExpr = new MutableObject<>(new UnnestingFunctionCallExpression(
+ FunctionUtil.getFunctionInfo(BuiltinFunctions.SCAN_COLLECTION), unnestArgList));
ILogicalOperator unnestOp = new UnnestOperator(unnestVar, unnestExpr);
return new Pair<>(unnestOp, unnestVar);
}
@@ -414,15 +410,15 @@
if (op.getDataSourceReference().getValue() != subplanOperator) {
return op;
}
- LogicalOperatorDeepCopyWithNewVariablesVisitor deepCopyVisitor = new LogicalOperatorDeepCopyWithNewVariablesVisitor(
- context, context);
+ LogicalOperatorDeepCopyWithNewVariablesVisitor deepCopyVisitor =
+ new LogicalOperatorDeepCopyWithNewVariablesVisitor(context, context);
ILogicalOperator copiedInputOperator = deepCopyVisitor.deepCopy(subplanInputOperator);
// Updates the primary key info in the copied plan segment.
Map<LogicalVariable, LogicalVariable> varMap = deepCopyVisitor.getInputToOutputVariableMapping();
addPrimaryKeys(varMap);
- Pair<ILogicalOperator, Set<LogicalVariable>> primaryOpAndVars = EquivalenceClassUtils
- .findOrCreatePrimaryKeyOpAndVariables(copiedInputOperator, true, context);
+ Pair<ILogicalOperator, Set<LogicalVariable>> primaryOpAndVars =
+ EquivalenceClassUtils.findOrCreatePrimaryKeyOpAndVariables(copiedInputOperator, true, context);
correlatedKeyVars.clear();
correlatedKeyVars.addAll(primaryOpAndVars.second);
// Update key variables and input-output-var mapping.
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineLeftNtsInSubplanJoinFlatteningVisitor.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineLeftNtsInSubplanJoinFlatteningVisitor.java
index 6a318d6..4a28344 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineLeftNtsInSubplanJoinFlatteningVisitor.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineLeftNtsInSubplanJoinFlatteningVisitor.java
@@ -399,8 +399,8 @@
*/
private void injectNullCheckVars(AbstractBinaryJoinOperator joinOp) {
LogicalVariable assignVar = context.newVar();
- ILogicalOperator assignOp = new AssignOperator(assignVar,
- new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
+ ILogicalOperator assignOp =
+ new AssignOperator(assignVar, new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
assignOp.getInputs().add(joinOp.getInputs().get(1));
joinOp.getInputs().set(1, new MutableObject<ILogicalOperator>(assignOp));
nullCheckVars.add(assignVar);
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineSubplanInputForNestedTupleSourceRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineSubplanInputForNestedTupleSourceRule.java
index 8ab8b1d..a9cd806 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineSubplanInputForNestedTupleSourceRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/InlineSubplanInputForNestedTupleSourceRule.java
@@ -273,12 +273,11 @@
}
private Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> rewriteSubplanOperator(
- Mutable<ILogicalOperator> opRef,
- IOptimizationContext context) throws AlgebricksException {
+ Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
// Recursively traverses input operators as if the current operator before rewriting the current operator.
- Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> changedAndVarMap = traverseNonSubplanOperator(op,
- context);
+ Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> changedAndVarMap =
+ traverseNonSubplanOperator(op, context);
if (op.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
return changedAndVarMap;
}
@@ -312,17 +311,15 @@
* @throws AlgebricksException
*/
private Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> traverseNonSubplanOperator(
- ILogicalOperator op,
- IOptimizationContext context) throws AlgebricksException {
+ ILogicalOperator op, IOptimizationContext context) throws AlgebricksException {
Set<LogicalVariable> liveVars = new HashSet<>();
VariableUtilities.getLiveVariables(op, liveVars);
LinkedHashMap<LogicalVariable, LogicalVariable> replacedVarMap = new LinkedHashMap<>();
LinkedHashMap<LogicalVariable, LogicalVariable> replacedVarMapForAncestor = new LinkedHashMap<>();
boolean changed = false;
for (Mutable<ILogicalOperator> childrenRef : op.getInputs()) {
- Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> resultFromChild = rewriteSubplanOperator(
- childrenRef,
- context);
+ Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> resultFromChild =
+ rewriteSubplanOperator(childrenRef, context);
changed = changed || resultFromChild.first;
resultFromChild.second.forEach((oldVar, newVar) -> {
if (liveVars.contains(oldVar)) {
@@ -344,8 +341,7 @@
}
private Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> applyGeneralFlattening(
- Mutable<ILogicalOperator> opRef,
- IOptimizationContext context) throws AlgebricksException {
+ Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
SubplanOperator subplanOp = (SubplanOperator) opRef.getValue();
if (!SubplanFlatteningUtil.containsOperators(subplanOp,
ImmutableSet.of(LogicalOperatorTag.DATASOURCESCAN, LogicalOperatorTag.INNERJOIN,
@@ -356,24 +352,24 @@
Mutable<ILogicalOperator> inputOpRef = subplanOp.getInputs().get(0);
ILogicalOperator inputOpBackup = inputOpRef.getValue();
// Creates parameters for the left outer join operator.
- Pair<ILogicalOperator, Set<LogicalVariable>> primaryOpAndVars = EquivalenceClassUtils
- .findOrCreatePrimaryKeyOpAndVariables(inputOpBackup, true, context);
+ Pair<ILogicalOperator, Set<LogicalVariable>> primaryOpAndVars =
+ EquivalenceClassUtils.findOrCreatePrimaryKeyOpAndVariables(inputOpBackup, true, context);
ILogicalOperator inputOp = primaryOpAndVars.first;
Set<LogicalVariable> primaryKeyVars = primaryOpAndVars.second;
inputOpRef.setValue(inputOp);
Set<LogicalVariable> inputLiveVars = new HashSet<>();
VariableUtilities.getLiveVariables(inputOp, inputLiveVars);
- Pair<Map<LogicalVariable, LogicalVariable>, List<Pair<IOrder, Mutable<ILogicalExpression>>>> varMapAndOrderExprs = SubplanFlatteningUtil
- .inlineAllNestedTupleSource(subplanOp, context);
+ Pair<Map<LogicalVariable, LogicalVariable>, List<Pair<IOrder, Mutable<ILogicalExpression>>>> varMapAndOrderExprs =
+ SubplanFlatteningUtil.inlineAllNestedTupleSource(subplanOp, context);
Map<LogicalVariable, LogicalVariable> varMap = varMapAndOrderExprs.first;
if (varMap == null) {
inputOpRef.setValue(inputOpBackup);
return new Pair<>(false, new LinkedHashMap<>());
}
- Mutable<ILogicalOperator> lowestAggregateRefInSubplan = SubplanFlatteningUtil
- .findLowestAggregate(subplanOp.getNestedPlans().get(0).getRoots().get(0));
+ Mutable<ILogicalOperator> lowestAggregateRefInSubplan =
+ SubplanFlatteningUtil.findLowestAggregate(subplanOp.getNestedPlans().get(0).getRoots().get(0));
Mutable<ILogicalOperator> rightInputOpRef = lowestAggregateRefInSubplan.getValue().getInputs().get(0);
ILogicalOperator rightInputOp = rightInputOpRef.getValue();
@@ -400,14 +396,16 @@
? new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(AlgebricksBuiltinFunctions.AND),
joinPredicates)
: joinPredicates.size() > 0 ? joinPredicates.get(0).getValue() : ConstantExpression.TRUE;
- LeftOuterJoinOperator leftOuterJoinOp = new LeftOuterJoinOperator(
- new MutableObject<>(joinExpr), inputOpRef, rightInputOpRef);
+ LeftOuterJoinOperator leftOuterJoinOp =
+ new LeftOuterJoinOperator(new MutableObject<>(joinExpr), inputOpRef, rightInputOpRef);
OperatorManipulationUtil.computeTypeEnvironmentBottomUp(rightInputOp, context);
context.computeAndSetTypeEnvironmentForOperator(leftOuterJoinOp);
// Creates group-by operator.
- List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByList = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
- List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByDecorList = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
+ List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByList =
+ new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
+ List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByDecorList =
+ new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
List<ILogicalPlan> nestedPlans = new ArrayList<>();
GroupByOperator groupbyOp = new GroupByOperator(groupByList, groupByDecorList, nestedPlans);
@@ -439,17 +437,14 @@
// Adds a select operator into the nested plan for group-by to remove tuples with NULL on {@code assignVar}, i.e.,
// subplan input tuples that are filtered out within a subplan.
- Mutable<ILogicalExpression> filterVarExpr = new MutableObject<>(
- new VariableReferenceExpression(assignVar));
+ Mutable<ILogicalExpression> filterVarExpr = new MutableObject<>(new VariableReferenceExpression(assignVar));
List<Mutable<ILogicalExpression>> args = new ArrayList<>();
args.add(filterVarExpr);
List<Mutable<ILogicalExpression>> argsForNotFunction = new ArrayList<>();
- argsForNotFunction.add(new MutableObject<>(new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.IS_MISSING), args)));
- SelectOperator selectOp = new SelectOperator(
- new MutableObject<>(new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.NOT), argsForNotFunction)),
- false, null);
+ argsForNotFunction.add(new MutableObject<>(
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.IS_MISSING), args)));
+ SelectOperator selectOp = new SelectOperator(new MutableObject<>(new ScalarFunctionCallExpression(
+ FunctionUtil.getFunctionInfo(BuiltinFunctions.NOT), argsForNotFunction)), false, null);
currentOpRef.getValue().getInputs().add(new MutableObject<>(selectOp));
selectOp.getInputs().add(new MutableObject<>(new NestedTupleSourceOperator(new MutableObject<>(groupbyOp))));
@@ -464,8 +459,8 @@
// Recursively applys this rule to the nested plan of the subplan operator,
// for the case where there are nested subplan operators within {@code subplanOp}.
- Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> result = rewriteSubplanOperator(rightInputOpRef,
- context);
+ Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> result =
+ rewriteSubplanOperator(rightInputOpRef, context);
VariableUtilities.substituteVariables(leftOuterJoinOp, result.second, context);
VariableUtilities.substituteVariables(groupbyOp, result.second, context);
@@ -474,29 +469,28 @@
}
private Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> applySpecialFlattening(
- Mutable<ILogicalOperator> opRef,
- IOptimizationContext context) throws AlgebricksException {
+ Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
SubplanOperator subplanOp = (SubplanOperator) opRef.getValue();
Mutable<ILogicalOperator> inputOpRef = subplanOp.getInputs().get(0);
LinkedHashMap<LogicalVariable, LogicalVariable> replacedVarMap = new LinkedHashMap<>();
// Recursively applies this rule to the nested plan of the subplan operator,
// for the case where there are nested subplan operators within {@code subplanOp}.
- Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> result = rewriteSubplanOperator(
- subplanOp.getNestedPlans().get(0).getRoots().get(0), context);
+ Pair<Boolean, LinkedHashMap<LogicalVariable, LogicalVariable>> result =
+ rewriteSubplanOperator(subplanOp.getNestedPlans().get(0).getRoots().get(0), context);
ILogicalOperator inputOpBackup = inputOpRef.getValue();
// Gets live variables and covering variables from the subplan's input operator.
- Pair<ILogicalOperator, Set<LogicalVariable>> primaryOpAndVars = EquivalenceClassUtils
- .findOrCreatePrimaryKeyOpAndVariables(inputOpBackup, false, context);
+ Pair<ILogicalOperator, Set<LogicalVariable>> primaryOpAndVars =
+ EquivalenceClassUtils.findOrCreatePrimaryKeyOpAndVariables(inputOpBackup, false, context);
ILogicalOperator inputOp = primaryOpAndVars.first;
Set<LogicalVariable> primaryKeyVars = primaryOpAndVars.second;
inputOpRef.setValue(inputOp);
Set<LogicalVariable> liveVars = new HashSet<>();
VariableUtilities.getLiveVariables(inputOp, liveVars);
- Pair<Set<LogicalVariable>, Mutable<ILogicalOperator>> notNullVarsAndTopJoinRef = SubplanFlatteningUtil
- .inlineLeftNtsInSubplanJoin(subplanOp, context);
+ Pair<Set<LogicalVariable>, Mutable<ILogicalOperator>> notNullVarsAndTopJoinRef =
+ SubplanFlatteningUtil.inlineLeftNtsInSubplanJoin(subplanOp, context);
if (notNullVarsAndTopJoinRef.first == null) {
inputOpRef.setValue(inputOpBackup);
return new Pair<>(false, replacedVarMap);
@@ -506,8 +500,10 @@
Mutable<ILogicalOperator> topJoinRef = notNullVarsAndTopJoinRef.second;
// Creates a group-by operator.
- List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByList = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
- List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByDecorList = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
+ List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByList =
+ new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
+ List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByDecorList =
+ new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
GroupByOperator groupbyOp = new GroupByOperator(groupByList, groupByDecorList, subplanOp.getNestedPlans());
for (LogicalVariable coverVar : primaryKeyVars) {
@@ -529,8 +525,8 @@
// subplan input tuples that are filtered out within a subplan.
List<Mutable<ILogicalExpression>> nullCheckExprRefs = new ArrayList<>();
for (LogicalVariable notNullVar : notNullVars) {
- Mutable<ILogicalExpression> filterVarExpr = new MutableObject<>(
- new VariableReferenceExpression(notNullVar));
+ Mutable<ILogicalExpression> filterVarExpr =
+ new MutableObject<>(new VariableReferenceExpression(notNullVar));
List<Mutable<ILogicalExpression>> args = new ArrayList<>();
args.add(filterVarExpr);
List<Mutable<ILogicalExpression>> argsForNotFunction = new ArrayList<>();
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/SubplanFlatteningUtil.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/SubplanFlatteningUtil.java
index 377e96d..dba5d47 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/SubplanFlatteningUtil.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/subplan/SubplanFlatteningUtil.java
@@ -91,16 +91,16 @@
*/
public static Pair<Set<LogicalVariable>, Mutable<ILogicalOperator>> inlineLeftNtsInSubplanJoin(
SubplanOperator subplanOp, IOptimizationContext context) throws AlgebricksException {
- Pair<Boolean, ILogicalOperator> applicableAndNtsToRewrite = SubplanFlatteningUtil
- .isQualifiedForSpecialFlattening(subplanOp);
+ Pair<Boolean, ILogicalOperator> applicableAndNtsToRewrite =
+ SubplanFlatteningUtil.isQualifiedForSpecialFlattening(subplanOp);
if (!applicableAndNtsToRewrite.first) {
return new Pair<Set<LogicalVariable>, Mutable<ILogicalOperator>>(null, null);
}
ILogicalOperator qualifiedNts = applicableAndNtsToRewrite.second;
ILogicalOperator subplanInputOp = subplanOp.getInputs().get(0).getValue();
- InlineLeftNtsInSubplanJoinFlatteningVisitor specialVisitor = new InlineLeftNtsInSubplanJoinFlatteningVisitor(
- context, subplanInputOp, qualifiedNts);
+ InlineLeftNtsInSubplanJoinFlatteningVisitor specialVisitor =
+ new InlineLeftNtsInSubplanJoinFlatteningVisitor(context, subplanInputOp, qualifiedNts);
// Rewrites the query plan.
Mutable<ILogicalOperator> topRef = subplanOp.getNestedPlans().get(0).getRoots().get(0);
@@ -157,7 +157,7 @@
}
if (currentOp.getOperatorTag() == LogicalOperatorTag.SUBPLAN
&& containsOperators((SubplanOperator) currentOp, interestedOperatorTags)) {
- return true;
+ return true;
}
for (Mutable<ILogicalOperator> childRef : currentOp.getInputs()) {
if (containsOperatorsInternal(childRef.getValue(), interestedOperatorTags)) {
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java
index c7ceefd..0d53a19 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/temporal/TranslateIntervalExpressionRule.java
@@ -106,66 +106,66 @@
} else if (funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.INTERVAL_MET_BY)) {
exprRef.setValue(getEqualExpr(getIntervalStartExpr(interval1), getIntervalEndExpr(interval2)));
} else if (funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.INTERVAL_STARTS)) {
- ILogicalExpression startExpr = getEqualExpr(getIntervalStartExpr(interval1),
- getIntervalStartExpr(interval2));
- ILogicalExpression endExpr = getLessThanOrEqualExpr(getIntervalEndExpr(interval1),
- getIntervalEndExpr(interval2));
+ ILogicalExpression startExpr =
+ getEqualExpr(getIntervalStartExpr(interval1), getIntervalStartExpr(interval2));
+ ILogicalExpression endExpr =
+ getLessThanOrEqualExpr(getIntervalEndExpr(interval1), getIntervalEndExpr(interval2));
exprRef.setValue(getAndExpr(startExpr, endExpr));
} else if (funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.INTERVAL_STARTED_BY)) {
- ILogicalExpression startExpr = getEqualExpr(getIntervalStartExpr(interval1),
- getIntervalStartExpr(interval2));
- ILogicalExpression endExpr = getLessThanOrEqualExpr(getIntervalEndExpr(interval2),
- getIntervalEndExpr(interval1));
+ ILogicalExpression startExpr =
+ getEqualExpr(getIntervalStartExpr(interval1), getIntervalStartExpr(interval2));
+ ILogicalExpression endExpr =
+ getLessThanOrEqualExpr(getIntervalEndExpr(interval2), getIntervalEndExpr(interval1));
exprRef.setValue(getAndExpr(startExpr, endExpr));
} else if (funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.INTERVAL_ENDS)) {
ILogicalExpression endExpr = getEqualExpr(getIntervalEndExpr(interval1), getIntervalEndExpr(interval2));
- ILogicalExpression startExpr = getLessThanOrEqualExpr(getIntervalStartExpr(interval1),
- getIntervalStartExpr(interval2));
+ ILogicalExpression startExpr =
+ getLessThanOrEqualExpr(getIntervalStartExpr(interval1), getIntervalStartExpr(interval2));
exprRef.setValue(getAndExpr(startExpr, endExpr));
} else if (funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.INTERVAL_ENDED_BY)) {
ILogicalExpression endExpr = getEqualExpr(getIntervalEndExpr(interval1), getIntervalEndExpr(interval2));
- ILogicalExpression startExpr = getLessThanOrEqualExpr(getIntervalStartExpr(interval2),
- getIntervalStartExpr(interval1));
+ ILogicalExpression startExpr =
+ getLessThanOrEqualExpr(getIntervalStartExpr(interval2), getIntervalStartExpr(interval1));
exprRef.setValue(getAndExpr(startExpr, endExpr));
} else if (funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.INTERVAL_BEFORE)) {
exprRef.setValue(getLessThanExpr(getIntervalEndExpr(interval1), getIntervalStartExpr(interval2)));
} else if (funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.INTERVAL_AFTER)) {
exprRef.setValue(getGreaterThanExpr(getIntervalStartExpr(interval1), getIntervalEndExpr(interval2)));
} else if (funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.INTERVAL_OVERLAPS)) {
- ILogicalExpression expr1 = getLessThanExpr(getIntervalStartExpr(interval1),
- getIntervalStartExpr(interval2));
+ ILogicalExpression expr1 =
+ getLessThanExpr(getIntervalStartExpr(interval1), getIntervalStartExpr(interval2));
ILogicalExpression expr2 = getGreaterThanExpr(getIntervalEndExpr(interval2), getIntervalEndExpr(interval1));
- ILogicalExpression expr3 = getGreaterThanExpr(getIntervalEndExpr(interval1),
- getIntervalStartExpr(interval2));
+ ILogicalExpression expr3 =
+ getGreaterThanExpr(getIntervalEndExpr(interval1), getIntervalStartExpr(interval2));
exprRef.setValue(getAndExpr(getAndExpr(expr1, expr2), expr3));
} else if (funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.INTERVAL_OVERLAPPED_BY)) {
- ILogicalExpression expr1 = getLessThanExpr(getIntervalStartExpr(interval2),
- getIntervalStartExpr(interval1));
+ ILogicalExpression expr1 =
+ getLessThanExpr(getIntervalStartExpr(interval2), getIntervalStartExpr(interval1));
ILogicalExpression expr2 = getGreaterThanExpr(getIntervalEndExpr(interval1), getIntervalEndExpr(interval2));
- ILogicalExpression expr3 = getGreaterThanExpr(getIntervalEndExpr(interval2),
- getIntervalStartExpr(interval1));
+ ILogicalExpression expr3 =
+ getGreaterThanExpr(getIntervalEndExpr(interval2), getIntervalStartExpr(interval1));
exprRef.setValue(getAndExpr(getAndExpr(expr1, expr2), expr3));
} else if (funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.INTERVAL_OVERLAPPING)) {
- ILogicalExpression startExpr = getLessThanOrEqualExpr(getIntervalStartExpr(interval1),
- getIntervalEndExpr(interval2));
- ILogicalExpression endExpr = getGreaterThanOrEqualExpr(getIntervalEndExpr(interval1),
- getIntervalStartExpr(interval2));
- ILogicalExpression startPointExpr = getNotEqualExpr(getIntervalEndExpr(interval1),
- getIntervalStartExpr(interval2));
- ILogicalExpression endPointExpr = getNotEqualExpr(getIntervalStartExpr(interval1),
- getIntervalEndExpr(interval2));
+ ILogicalExpression startExpr =
+ getLessThanOrEqualExpr(getIntervalStartExpr(interval1), getIntervalEndExpr(interval2));
+ ILogicalExpression endExpr =
+ getGreaterThanOrEqualExpr(getIntervalEndExpr(interval1), getIntervalStartExpr(interval2));
+ ILogicalExpression startPointExpr =
+ getNotEqualExpr(getIntervalEndExpr(interval1), getIntervalStartExpr(interval2));
+ ILogicalExpression endPointExpr =
+ getNotEqualExpr(getIntervalStartExpr(interval1), getIntervalEndExpr(interval2));
exprRef.setValue(getAndExpr(getAndExpr(startExpr, endExpr), getAndExpr(startPointExpr, endPointExpr)));
} else if (funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.INTERVAL_COVERS)) {
- ILogicalExpression startExpr = getLessThanOrEqualExpr(getIntervalStartExpr(interval1),
- getIntervalStartExpr(interval2));
- ILogicalExpression endExpr = getGreaterThanOrEqualExpr(getIntervalEndExpr(interval1),
- getIntervalEndExpr(interval2));
+ ILogicalExpression startExpr =
+ getLessThanOrEqualExpr(getIntervalStartExpr(interval1), getIntervalStartExpr(interval2));
+ ILogicalExpression endExpr =
+ getGreaterThanOrEqualExpr(getIntervalEndExpr(interval1), getIntervalEndExpr(interval2));
exprRef.setValue(getAndExpr(startExpr, endExpr));
} else if (funcExpr.getFunctionIdentifier().equals(BuiltinFunctions.INTERVAL_COVERED_BY)) {
- ILogicalExpression startExpr = getLessThanOrEqualExpr(getIntervalStartExpr(interval2),
- getIntervalStartExpr(interval1));
- ILogicalExpression endExpr = getGreaterThanOrEqualExpr(getIntervalEndExpr(interval2),
- getIntervalEndExpr(interval1));
+ ILogicalExpression startExpr =
+ getLessThanOrEqualExpr(getIntervalStartExpr(interval2), getIntervalStartExpr(interval1));
+ ILogicalExpression endExpr =
+ getGreaterThanOrEqualExpr(getIntervalEndExpr(interval2), getIntervalEndExpr(interval1));
exprRef.setValue(getAndExpr(startExpr, endExpr));
} else {
return false;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/util/EquivalenceClassUtils.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/util/EquivalenceClassUtils.java
index 9b5654a..82d963d 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/util/EquivalenceClassUtils.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/optimizer/rules/util/EquivalenceClassUtils.java
@@ -121,8 +121,8 @@
new MutableObject<ILogicalExpression>(new VariableReferenceExpression(referredRecordVar)),
new MutableObject<ILogicalExpression>(
new ConstantExpression(new AsterixConstantValue(new AInt32(fieldIndexInRecord)))));
- EquivalenceClass equivClass = new EquivalenceClass(Collections.singletonList(var), var,
- Collections.singletonList(expr));
+ EquivalenceClass equivClass =
+ new EquivalenceClass(Collections.singletonList(var), var, Collections.singletonList(expr));
Map<LogicalVariable, EquivalenceClass> equivalenceMap = context.getEquivalenceClassMap(operator);
if (equivalenceMap == null) {
equivalenceMap = new HashMap<LogicalVariable, EquivalenceClass>();
@@ -148,7 +148,7 @@
*/
public static Pair<ILogicalOperator, Set<LogicalVariable>> findOrCreatePrimaryKeyOpAndVariables(
ILogicalOperator operator, boolean usedForCorrelationJoin, IOptimizationContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
computePrimaryKeys(operator, context);
Set<LogicalVariable> liveVars = new HashSet<>();
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
index ae60c71..acf2908 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AbstractLangTranslator.java
@@ -157,8 +157,8 @@
if (hints != null && !hints.isEmpty()) {
StringBuilder errorMsgBuffer = new StringBuilder();
for (Entry<String, String> hint : hints.entrySet()) {
- Pair<Boolean, String> validationResult = DatasetHints.validate(appCtx, hint.getKey(),
- hint.getValue());
+ Pair<Boolean, String> validationResult =
+ DatasetHints.validate(appCtx, hint.getKey(), hint.getValue());
if (!validationResult.first) {
errorMsgBuffer.append("Dataset: ").append(datasetStmt.getName().getValue())
.append(" error in processing hint: ").append(hint.getKey()).append(" ")
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java
index 18304b3..531de59 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/AqlPlusExpressionToPlanTranslator.java
@@ -144,7 +144,6 @@
return new Pair<ILogicalOperator, LogicalVariable>(a, var);
}
-
public void addVariableToMetaScope(Identifier id, LogicalVariable var) {
metaScopeExp.put(id, var);
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
index 2b83158..a3d3ac9 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/LangExpressionToPlanTranslator.java
@@ -837,8 +837,8 @@
if (gc.hasGroupVar()) {
VariableExpr groupVar = gc.getGroupVar();
LogicalVariable groupLogicalVar = context.newVar();
- ILogicalPlan nestedPlan = createNestedPlanWithAggregate(groupLogicalVar,
- BuiltinFunctions.LISTIFY, new VariableReferenceExpression(groupRecordVar),
+ ILogicalPlan nestedPlan = createNestedPlanWithAggregate(groupLogicalVar, BuiltinFunctions.LISTIFY,
+ new VariableReferenceExpression(groupRecordVar),
new MutableObject<>(new NestedTupleSourceOperator(new MutableObject<>(gOp))));
gOp.getNestedPlans().add(nestedPlan);
context.setVar(groupVar, groupLogicalVar);
@@ -851,8 +851,8 @@
Pair<ILogicalExpression, Mutable<ILogicalOperator>> listifyInput = langExprToAlgExpression(withExpr,
new MutableObject<>(new NestedTupleSourceOperator(new MutableObject<>(gOp))));
LogicalVariable withLogicalVar = context.newVar();
- ILogicalPlan nestedPlan = createNestedPlanWithAggregate(withLogicalVar,
- BuiltinFunctions.LISTIFY, listifyInput.first, listifyInput.second);
+ ILogicalPlan nestedPlan = createNestedPlanWithAggregate(withLogicalVar, BuiltinFunctions.LISTIFY,
+ listifyInput.first, listifyInput.second);
gOp.getNestedPlans().add(nestedPlan);
context.setVar(withVar, withLogicalVar);
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/SessionConfig.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/SessionConfig.java
index 1160aaa..cb6d8e5 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/SessionConfig.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/SessionConfig.java
@@ -61,10 +61,8 @@
public static PlanFormat get(String fmtString, String label, PlanFormat defaultFmt, Logger logger) {
try {
if (fmtString != null) {
- String format =
- ("JSON".equalsIgnoreCase(fmtString) || "CLEAN_JSON".equalsIgnoreCase(fmtString))
- ? "JSON"
- : fmtString;
+ String format = ("JSON".equalsIgnoreCase(fmtString) || "CLEAN_JSON".equalsIgnoreCase(fmtString))
+ ? "JSON" : fmtString;
return PlanFormat.valueOf(format);
}
} catch (IllegalArgumentException e) {
@@ -168,6 +166,7 @@
public SessionConfig(OutputFormat fmt, boolean optimize, boolean executeQuery, boolean generateJobSpec) {
this(fmt, optimize, executeQuery, generateJobSpec, PlanFormat.STRING);
}
+
public SessionConfig(OutputFormat fmt, boolean optimize, boolean executeQuery, boolean generateJobSpec,
PlanFormat lpfmt) {
this.fmt = fmt;
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/SqlppExpressionToPlanTranslator.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/SqlppExpressionToPlanTranslator.java
index 36183c6..200cc6f 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/SqlppExpressionToPlanTranslator.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/SqlppExpressionToPlanTranslator.java
@@ -345,8 +345,8 @@
}
// Adds an aggregate operator to listfy unnest variables.
- AggregateFunctionCallExpression fListify = BuiltinFunctions
- .makeAggregateFunctionExpression(BuiltinFunctions.LISTIFY, mkSingletonArrayList(
+ AggregateFunctionCallExpression fListify =
+ BuiltinFunctions.makeAggregateFunctionExpression(BuiltinFunctions.LISTIFY, mkSingletonArrayList(
new MutableObject<ILogicalExpression>(new VariableReferenceExpression(varToListify))));
LogicalVariable aggVar = context.newSubplanOutputVar();
@@ -510,8 +510,8 @@
// A "THEN" branch can be entered only when the tuple has not enter any other preceding
// branches and the current "WHEN" condition is TRUE.
- branchEntraceConditionExprRef = new MutableObject<>(new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.AND), andArgs));
+ branchEntraceConditionExprRef = new MutableObject<>(
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.AND), andArgs));
}
// Translates the corresponding "THEN" expression.
@@ -539,8 +539,8 @@
arguments.add(new MutableObject<>(argVar));
}
arguments.add(new MutableObject<>(new VariableReferenceExpression(opAndVarForElse.second)));
- AbstractFunctionCallExpression swithCaseExpr = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.SWITCH_CASE), arguments);
+ AbstractFunctionCallExpression swithCaseExpr =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.SWITCH_CASE), arguments);
AssignOperator assignOp = new AssignOperator(selectVar, new MutableObject<>(swithCaseExpr));
assignOp.getInputs().add(new MutableObject<>(opAndVarForElse.first));
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/TranslationContext.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/TranslationContext.java
index 1344e0c..33653ad 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/TranslationContext.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/TranslationContext.java
@@ -97,7 +97,6 @@
return var;
}
-
public void setVar(VariableExpr v, LogicalVariable var) {
currentVarMap.put(v.getVar().getId(), var);
}
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/PlanTranslationUtil.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/PlanTranslationUtil.java
index 5e70baf..919bdf2 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/PlanTranslationUtil.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/PlanTranslationUtil.java
@@ -44,8 +44,8 @@
List<Mutable<ILogicalExpression>> assignExpressions, List<LogicalVariable> vars,
List<Mutable<ILogicalExpression>> varRefs, IVariableContext context) {
IAObject value = (field.size() > 1) ? new AOrderedList(field) : new AString(field.get(0));
- ScalarFunctionCallExpression metaKeyFunction = new ScalarFunctionCallExpression(
- FunctionUtil.getFunctionInfo(BuiltinFunctions.META_KEY));
+ ScalarFunctionCallExpression metaKeyFunction =
+ new ScalarFunctionCallExpression(FunctionUtil.getFunctionInfo(BuiltinFunctions.META_KEY));
metaKeyFunction.getArguments()
.add(new MutableObject<ILogicalExpression>(new VariableReferenceExpression(resVar)));
metaKeyFunction.getArguments()
diff --git a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java
index cda9dd3..4b4b2b0 100644
--- a/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java
+++ b/asterixdb/asterix-algebra/src/main/java/org/apache/asterix/translator/util/ValidateUtil.java
@@ -178,9 +178,9 @@
* the type of the index that its key fields is being validated
* @throws AlgebricksException
*/
- public static void validateKeyFields(ARecordType recType, ARecordType metaRecType,
- List<List<String>> keyFieldNames, List<Integer> keySourceIndicators, List<IAType> keyFieldTypes,
- IndexType indexType) throws AlgebricksException {
+ public static void validateKeyFields(ARecordType recType, ARecordType metaRecType, List<List<String>> keyFieldNames,
+ List<Integer> keySourceIndicators, List<IAType> keyFieldTypes, IndexType indexType)
+ throws AlgebricksException {
List<IAType> fieldTypes =
KeyFieldTypeUtil.getKeyTypes(recType, metaRecType, keyFieldNames, keySourceIndicators);
int pos = 0;
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
index b0edb3e..0f91275 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/common/APIFramework.java
@@ -350,12 +350,10 @@
// Sets a required capacity, only for read-only queries.
// DDLs and DMLs are considered not that frequent.
// limit the computation locations to the locations that will be used in the query
- final AlgebricksAbsolutePartitionConstraint jobLocations =
- getJobLocations(spec, metadataProvider.getApplicationContext().getNodeJobTracker(),
- computationLocations);
- final IClusterCapacity jobRequiredCapacity = ResourceUtils
- .getRequiredCapacity(plan, jobLocations, sortFrameLimit, groupFrameLimit, joinFrameLimit,
- frameSize);
+ final AlgebricksAbsolutePartitionConstraint jobLocations = getJobLocations(spec,
+ metadataProvider.getApplicationContext().getNodeJobTracker(), computationLocations);
+ final IClusterCapacity jobRequiredCapacity = ResourceUtils.getRequiredCapacity(plan, jobLocations,
+ sortFrameLimit, groupFrameLimit, joinFrameLimit, frameSize);
spec.setRequiredClusterCapacity(jobRequiredCapacity);
}
@@ -512,8 +510,7 @@
public static AlgebricksAbsolutePartitionConstraint getJobLocations(JobSpecification spec,
INodeJobTracker jobTracker, AlgebricksAbsolutePartitionConstraint clusterLocations) {
final Set<String> jobParticipatingNodes = jobTracker.getJobParticipatingNodes(spec);
- return new AlgebricksAbsolutePartitionConstraint(
- Arrays.stream(clusterLocations.getLocations()).filter(jobParticipatingNodes::contains)
- .toArray(String[]::new));
+ return new AlgebricksAbsolutePartitionConstraint(Arrays.stream(clusterLocations.getLocations())
+ .filter(jobParticipatingNodes::contains).toArray(String[]::new));
}
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java
index df2a2a1..63896f2 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ApiServlet.java
@@ -90,11 +90,11 @@
this.componentProvider = componentProvider;
}
- @Override protected void post(IServletRequest request, IServletResponse response) {
+ @Override
+ protected void post(IServletRequest request, IServletResponse response) {
// Query language
- ILangCompilationProvider compilationProvider = "AQL".equals(request.getParameter("query-language")) ?
- aqlCompilationProvider :
- sqlppCompilationProvider;
+ ILangCompilationProvider compilationProvider = "AQL".equals(request.getParameter("query-language"))
+ ? aqlCompilationProvider : sqlppCompilationProvider;
IParserFactory parserFactory = compilationProvider.getParserFactory();
// Output format.
@@ -150,14 +150,12 @@
}
IParser parser = parserFactory.createParser(query);
List<Statement> aqlStatements = parser.parse();
- SessionConfig sessionConfig =
- new SessionConfig(format, true, isSet(executeQuery), true, planFormat);
+ SessionConfig sessionConfig = new SessionConfig(format, true, isSet(executeQuery), true, planFormat);
sessionConfig.set(SessionConfig.FORMAT_HTML, true);
sessionConfig.set(SessionConfig.FORMAT_CSV_HEADER, csvAndHeader);
sessionConfig.set(SessionConfig.FORMAT_WRAPPER_ARRAY, isSet(wrapperArray));
- sessionConfig
- .setOOBData(isSet(printExprParam), isSet(printRewrittenExprParam), isSet(printLogicalPlanParam),
- isSet(printOptimizedLogicalPlanParam), isSet(printJob));
+ sessionConfig.setOOBData(isSet(printExprParam), isSet(printRewrittenExprParam),
+ isSet(printLogicalPlanParam), isSet(printOptimizedLogicalPlanParam), isSet(printJob));
SessionOutput sessionOutput = new SessionOutput(sessionConfig, out);
MetadataManager.INSTANCE.init();
IStatementExecutor translator = statementExectorFactory.create(appCtx, aqlStatements, sessionOutput,
@@ -232,8 +230,8 @@
try {
line = br.readLine();
} catch (NullPointerException e) {
- LOGGER.log(Level.WARN,
- "NPE reading resource " + resourcePath + ", assuming JDK-8080094; returning 404", e);
+ LOGGER.log(Level.WARN, "NPE reading resource " + resourcePath + ", assuming JDK-8080094; returning 404",
+ e);
// workaround lame JDK bug where a broken InputStream is returned in case the resourcePath is a
// directory; see https://bugs.openjdk.java.net/browse/JDK-8080094
response.setStatus(HttpResponseStatus.NOT_FOUND);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
index f7f8385..0f08a22 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/DiagnosticsApiServlet.java
@@ -119,10 +119,10 @@
ccFutureData = new HashMap<>();
ccFutureData.put("threaddump",
executor.submit(() -> fixupKeys((ObjectNode) OBJECT_MAPPER.readTree(processThreadDump(null)))));
- ccFutureData.put("config", executor.submit(
- () -> fixupKeys((ObjectNode) OBJECT_MAPPER.readTree(processNodeDetails(null, false, true)))));
- ccFutureData.put("stats", executor.submit(
- () -> fixupKeys((ObjectNode) OBJECT_MAPPER.readTree(processNodeDetails(null, true, false)))));
+ ccFutureData.put("config", executor
+ .submit(() -> fixupKeys((ObjectNode) OBJECT_MAPPER.readTree(processNodeDetails(null, false, true)))));
+ ccFutureData.put("stats", executor
+ .submit(() -> fixupKeys((ObjectNode) OBJECT_MAPPER.readTree(processNodeDetails(null, true, false)))));
return ccFutureData;
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryCancellationServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryCancellationServlet.java
index 6c94344..8fd40b3 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryCancellationServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryCancellationServlet.java
@@ -54,8 +54,8 @@
}
// Retrieves the corresponding Hyracks job id.
- IStatementExecutorContext runningQueries = (IStatementExecutorContext) ctx
- .get(ServletConstants.RUNNING_QUERIES_ATTR);
+ IStatementExecutorContext runningQueries =
+ (IStatementExecutorContext) ctx.get(ServletConstants.RUNNING_QUERIES_ATTR);
IHyracksClientConnection hcc = (IHyracksClientConnection) ctx.get(ServletConstants.HYRACKS_CONNECTION_ATTR);
JobId jobId = runningQueries.getJobIdFromClientContextId(clientContextId);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java
index 23a7ba7..1057a73 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/QueryServiceServlet.java
@@ -125,7 +125,7 @@
@Override
protected void options(IServletRequest request, IServletResponse response) throws Exception {
response.setHeader("Access-Control-Allow-Origin",
- "http://" + hostName + ":" + appCtx.getExternalProperties().getQueryWebInterfacePort());
+ "http://" + hostName + ":" + appCtx.getExternalProperties().getQueryWebInterfacePort());
response.setHeader("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
response.setStatus(HttpResponseStatus.OK);
}
@@ -453,9 +453,8 @@
ResultDelivery delivery = parseResultDelivery(param.mode);
- final ResultProperties resultProperties = param.maxResultReads == null ?
- new ResultProperties(delivery) :
- new ResultProperties(delivery, Long.parseLong(param.maxResultReads));
+ final ResultProperties resultProperties = param.maxResultReads == null ? new ResultProperties(delivery)
+ : new ResultProperties(delivery, Long.parseLong(param.maxResultReads));
String handleUrl = getHandleUrl(param.host, param.path, delivery);
SessionOutput sessionOutput = createSessionOutput(param, handleUrl, httpWriter);
@@ -526,9 +525,8 @@
IStatementExecutor translator = statementExecutorFactory.create((ICcApplicationContext) appCtx, statements,
sessionOutput, compilationProvider, componentProvider);
execution.start();
- final IRequestParameters requestParameters =
- new org.apache.asterix.app.translator.RequestParameters(getHyracksDataset(), resultProperties, stats,
- null, param.clientContextID, optionalParameters);
+ final IRequestParameters requestParameters = new org.apache.asterix.app.translator.RequestParameters(
+ getHyracksDataset(), resultProperties, stats, null, param.clientContextID, optionalParameters);
translator.compileAndExecute(getHyracksClientConnection(), queryCtx, requestParameters);
execution.end();
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java
index 360c522..428a4e0 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/RestApiServlet.java
@@ -209,9 +209,8 @@
MetadataManager.INSTANCE.init();
IStatementExecutor translator = statementExecutorFactory.create(appCtx, aqlStatements, sessionOutput,
compilationProvider, componentProvider);
- final IRequestParameters requestParameters =
- new RequestParameters(hds, new ResultProperties(resultDelivery), new IStatementExecutor.Stats(),
- null, null, null);
+ final IRequestParameters requestParameters = new RequestParameters(hds,
+ new ResultProperties(resultDelivery), new IStatementExecutor.Stats(), null, null, null);
translator.compileAndExecute(hcc, null, requestParameters);
} catch (AsterixException | TokenMgrError | org.apache.asterix.aqlplus.parser.TokenMgrError pe) {
response.setStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java
index aace681..ec128c2 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/http/server/ResultUtil.java
@@ -54,9 +54,9 @@
public class ResultUtil {
private static final Logger LOGGER = LogManager.getLogger();
- public static final List<Pair<Character, String>> HTML_ENTITIES = Collections.unmodifiableList(
- Arrays.asList(Pair.of('&', "&"), Pair.of('"', """), Pair.of('<', "<"), Pair.of('>', ">"),
- Pair.of('\'', "'")));
+ public static final List<Pair<Character, String>> HTML_ENTITIES =
+ Collections.unmodifiableList(Arrays.asList(Pair.of('&', "&"), Pair.of('"', """),
+ Pair.of('<', "<"), Pair.of('>', ">"), Pair.of('\'', "'")));
private ResultUtil() {
}
@@ -208,8 +208,8 @@
errorCode = 4;
}
- ObjectNode errorResp = ResultUtil
- .getErrorResponse(errorCode, extractErrorMessage(e), extractErrorSummary(e), extractFullStackTrace(e));
+ ObjectNode errorResp = ResultUtil.getErrorResponse(errorCode, extractErrorMessage(e), extractErrorSummary(e),
+ extractFullStackTrace(e));
out.write(errorResp.toString());
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java
index 4ecd978..0e51953 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/api/java/AsterixJavaClient.java
@@ -94,8 +94,7 @@
public void compile(boolean optimize, boolean printRewrittenExpressions, boolean printLogicalPlan,
boolean printOptimizedPlan, boolean printPhysicalOpsOnly, boolean generateBinaryRuntime, boolean printJob,
- PlanFormat pformat)
- throws Exception {
+ PlanFormat pformat) throws Exception {
queryJobSpec = null;
dmlJobs = null;
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveNotificationHandler.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveNotificationHandler.java
index da2c99a..b8ed03f 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveNotificationHandler.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/ActiveNotificationHandler.java
@@ -256,8 +256,7 @@
}
}
- public void resume(MetadataProvider mdProvider)
- throws HyracksDataException, InterruptedException {
+ public void resume(MetadataProvider mdProvider) throws HyracksDataException, InterruptedException {
LOGGER.log(level, "Resuming active events handler");
for (IActiveEntityEventsListener listener : entityEventListeners.values()) {
LOGGER.log(level, "Resuming " + listener.getEntityId());
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/FeedEventsListener.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/FeedEventsListener.java
index c0ce6ec..fbf644f 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/FeedEventsListener.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/active/FeedEventsListener.java
@@ -67,8 +67,8 @@
@Override
public synchronized void remove(Dataset dataset) throws HyracksDataException {
super.remove(dataset);
- feedConnections.removeIf(o -> o.getDataverseName().equals(dataset.getDataverseName()) && o.getDatasetName()
- .equals(dataset.getDatasetName()));
+ feedConnections.removeIf(o -> o.getDataverseName().equals(dataset.getDataverseName())
+ && o.getDatasetName().equals(dataset.getDatasetName()));
}
public synchronized void addFeedConnection(FeedConnection feedConnection) {
@@ -115,8 +115,8 @@
// Construct ActiveMessage
for (int i = 0; i < getLocations().getLocations().length; i++) {
String intakeLocation = getLocations().getLocations()[i];
- FeedOperations
- .SendStopMessageToNode(metadataProvider.getApplicationContext(), entityId, intakeLocation, i);
+ FeedOperations.SendStopMessageToNode(metadataProvider.getApplicationContext(), entityId, intakeLocation,
+ i);
}
eventSubscriber.sync();
} catch (Exception e) {
@@ -126,8 +126,7 @@
}
@Override
- protected void setRunning(MetadataProvider metadataProvider, boolean running)
- throws HyracksDataException {
+ protected void setRunning(MetadataProvider metadataProvider, boolean running) throws HyracksDataException {
// No op
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryClassLoader.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryClassLoader.java
index 711ac6d..9cf7584 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryClassLoader.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryClassLoader.java
@@ -23,7 +23,8 @@
public class ExternalLibraryClassLoader extends URLClassLoader {
- private static final ClassLoader bootClassLoader = new ClassLoader(null){};
+ private static final ClassLoader bootClassLoader = new ClassLoader(null) {
+ };
public ExternalLibraryClassLoader(URL[] urls, ClassLoader parentClassLoader) {
super(urls, parentClassLoader);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
index f2933f4..c9f777d 100755
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
@@ -171,8 +171,7 @@
// belong to the library?
if (adapter.getAdapterIdentifier().getName().startsWith(libraryName + "#")) {
// remove adapter <! we didn't check if there are feeds which use this adapter>
- MetadataManager.INSTANCE.dropAdapter(mdTxnCtx, dataverse,
- adapter.getAdapterIdentifier().getName());
+ MetadataManager.INSTANCE.dropAdapter(mdTxnCtx, dataverse, adapter.getAdapterIdentifier().getName());
}
}
// drop the library itself
@@ -394,8 +393,8 @@
// Check managix directory first. If not exists, check app home.
File installDir = new File(System.getProperty("user.dir"), "library");
if (!installDir.exists()) {
- installDir = new File(System.getProperty("app.home", System.getProperty("user.home"))
- + File.separator + "lib" + File.separator + "udfs");
+ installDir = new File(System.getProperty("app.home", System.getProperty("user.home")) + File.separator
+ + "lib" + File.separator + "udfs");
}
return installDir;
}
@@ -407,8 +406,8 @@
// Check managix directory first. If not exists, check app home.
File uninstallDir = new File(System.getProperty("user.dir"), "uninstall");
if (!uninstallDir.exists()) {
- uninstallDir = new File(System.getProperty("app.home", System.getProperty("user.home"))
- + File.separator + "lib" + File.separator + "udfs" + File.separator + "uninstall");
+ uninstallDir = new File(System.getProperty("app.home", System.getProperty("user.home")) + File.separator
+ + "lib" + File.separator + "udfs" + File.separator + "uninstall");
}
return uninstallDir;
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalUDFLibrarian.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalUDFLibrarian.java
index 75207e8..29a4b36 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalUDFLibrarian.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalUDFLibrarian.java
@@ -89,8 +89,8 @@
installLibDir.mkdir();
}
// copy the library file into the directory
- File destinationDir = new File(
- installLibDir.getAbsolutePath() + File.separator + dvName + File.separator + libName);
+ File destinationDir =
+ new File(installLibDir.getAbsolutePath() + File.separator + dvName + File.separator + libName);
FileUtils.deleteQuietly(destinationDir);
destinationDir.mkdirs();
try {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/ExecuteStatementRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/ExecuteStatementRequestMessage.java
index 5b0eb97..31b213e 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/ExecuteStatementRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/message/ExecuteStatementRequestMessage.java
@@ -122,9 +122,8 @@
IStatementExecutor translator = statementExecutorFactory.create(ccAppCtx, statements, sessionOutput,
compilationProvider, storageComponentProvider);
final IStatementExecutor.Stats stats = new IStatementExecutor.Stats();
- final IRequestParameters requestParameters =
- new RequestParameters(null, resultProperties, stats, outMetadata, clientContextID,
- optionalParameters);
+ final IRequestParameters requestParameters = new RequestParameters(null, resultProperties, stats,
+ outMetadata, clientContextID, optionalParameters);
translator.compileAndExecute(ccApp.getHcc(), statementExecutorContext, requestParameters);
outPrinter.close();
responseMsg.setResult(outWriter.toString());
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
index cb8c161..c554cbd 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/NCAppRuntimeContext.java
@@ -203,8 +203,8 @@
datasetMemoryManager, indexCheckpointManagerProvider, ioManager.getIODevices().size());
final String nodeId = getServiceContext().getNodeId();
final ClusterPartition[] nodePartitions = metadataProperties.getNodePartitions().get(nodeId);
- final Set<Integer> nodePartitionsIds = Arrays.stream(nodePartitions).map(ClusterPartition::getPartitionId)
- .collect(Collectors.toSet());
+ final Set<Integer> nodePartitionsIds =
+ Arrays.stream(nodePartitions).map(ClusterPartition::getPartitionId).collect(Collectors.toSet());
replicaManager = new ReplicaManager(this, nodePartitionsIds);
isShuttingdown = false;
activeManager = new ActiveManager(threadExecutor, getServiceContext().getNodeId(),
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/RecoveryManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/RecoveryManager.java
index c189983..3717673 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/RecoveryManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/nc/RecoveryManager.java
@@ -97,7 +97,6 @@
private final INCServiceContext serviceCtx;
private final INcApplicationContext appCtx;
-
public RecoveryManager(ITransactionSubsystem txnSubsystem, INCServiceContext serviceCtx) {
this.serviceCtx = serviceCtx;
this.txnSubsystem = txnSubsystem;
@@ -414,8 +413,8 @@
long minFirstLSN = logMgr.getAppendLSN();
if (!openIndexList.isEmpty()) {
for (IIndex index : openIndexList) {
- AbstractLSMIOOperationCallback ioCallback = (AbstractLSMIOOperationCallback) ((ILSMIndex) index)
- .getIOOperationCallback();
+ AbstractLSMIOOperationCallback ioCallback =
+ (AbstractLSMIOOperationCallback) ((ILSMIndex) index).getIOOperationCallback();
if (!((AbstractLSMIndex) index).isCurrentMutableComponentEmpty() || ioCallback.hasPendingFlush()) {
firstLSN = ioCallback.getFirstLSN();
minFirstLSN = Math.min(minFirstLSN, firstLSN);
@@ -583,9 +582,9 @@
if (activePartitions.contains(logRecord.getResourcePartition())) {
undoLSNSet = jobLoserEntity2LSNsMap.get(tempKeyTxnEntityId);
if (undoLSNSet == null) {
- loserEntity = new TxnEntityId(logTxnId, logRecord.getDatasetId(),
- logRecord.getPKHashValue(), logRecord.getPKValue(), logRecord.getPKValueSize(),
- true);
+ loserEntity =
+ new TxnEntityId(logTxnId, logRecord.getDatasetId(), logRecord.getPKHashValue(),
+ logRecord.getPKValue(), logRecord.getPKValueSize(), true);
undoLSNSet = new LinkedList<>();
jobLoserEntity2LSNsMap.put(loserEntity, undoLSNSet);
}
@@ -680,8 +679,8 @@
private static void undo(ILogRecord logRecord, IDatasetLifecycleManager datasetLifecycleManager) {
try {
- ILSMIndex index = (ILSMIndex) datasetLifecycleManager.getIndex(logRecord.getDatasetId(),
- logRecord.getResourceId());
+ ILSMIndex index =
+ (ILSMIndex) datasetLifecycleManager.getIndex(logRecord.getDatasetId(), logRecord.getResourceId());
ILSMIndexAccessor indexAccessor = index.createAccessor(NoOpIndexAccessParameters.INSTANCE);
if (logRecord.getNewOp() == AbstractIndexModificationOperationCallback.INSERT_BYTE) {
indexAccessor.forceDelete(logRecord.getNewValue());
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NcLifecycleCoordinator.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NcLifecycleCoordinator.java
index 54b1a01..980375d 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NcLifecycleCoordinator.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/NcLifecycleCoordinator.java
@@ -146,9 +146,8 @@
final List<INCLifecycleTask> tasks = new ArrayList<>();
if (state == SystemState.CORRUPTED) {
//need to perform local recovery for node partitions
- LocalRecoveryTask rt = new LocalRecoveryTask(
- Arrays.asList(clusterManager.getNodePartitions(nodeId)).stream()
- .map(ClusterPartition::getPartitionId).collect(Collectors.toSet()));
+ LocalRecoveryTask rt = new LocalRecoveryTask(Arrays.asList(clusterManager.getNodePartitions(nodeId))
+ .stream().map(ClusterPartition::getPartitionId).collect(Collectors.toSet()));
tasks.add(rt);
}
if (replicationEnabled) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksRequestMessage.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksRequestMessage.java
index 62e7a69..3c7182d 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksRequestMessage.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/replication/message/RegistrationTasksRequestMessage.java
@@ -48,8 +48,8 @@
public static void send(CcId ccId, NodeControllerService cs, NodeStatus nodeStatus, SystemState systemState)
throws HyracksDataException {
try {
- RegistrationTasksRequestMessage msg = new RegistrationTasksRequestMessage(cs.getId(), nodeStatus,
- systemState);
+ RegistrationTasksRequestMessage msg =
+ new RegistrationTasksRequestMessage(cs.getId(), nodeStatus, systemState);
((INCMessageBroker) cs.getContext().getMessageBroker()).sendMessageToCC(ccId, msg);
} catch (Exception e) {
LOGGER.log(Level.ERROR, "Unable to send RegistrationTasksRequestMessage to CC", e);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/resource/RequiredCapacityVisitor.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/resource/RequiredCapacityVisitor.java
index 3a6bfee..89fbcb2 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/resource/RequiredCapacityVisitor.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/app/resource/RequiredCapacityVisitor.java
@@ -314,8 +314,8 @@
addOutputBuffer(op);
return;
}
- stageMemorySoFar += 2L * MAX_BUFFER_PER_CONNECTION * numComputationPartitions * numComputationPartitions
- * frameSize;
+ stageMemorySoFar +=
+ 2L * MAX_BUFFER_PER_CONNECTION * numComputationPartitions * numComputationPartitions * frameSize;
clusterCapacity.setAggregatedMemoryByteSize(stageMemorySoFar);
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java
index 14a5fe0..aa6bbdd 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/drivers/AsterixClientDriver.java
@@ -64,9 +64,8 @@
boolean onlyPhysical, boolean createBinaryRuntime) throws Exception {
ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
FileReader reader = new FileReader(filename);
- AsterixJavaClient q =
- new AsterixJavaClient(null, hcc, reader, compilationProvider, new DefaultStatementExecutorFactory(),
- new StorageComponentProvider());
+ AsterixJavaClient q = new AsterixJavaClient(null, hcc, reader, compilationProvider,
+ new DefaultStatementExecutorFactory(), new StorageComponentProvider());
q.compile(optimize, true, true, true, onlyPhysical, createBinaryRuntime, createBinaryRuntime);
return q;
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
index 1c7bfb7..6baf488 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplication.java
@@ -124,8 +124,8 @@
if (args.length > 0) {
throw new IllegalArgumentException("Unrecognized argument(s): " + Arrays.toString(args));
}
- final ClusterControllerService controllerService = (ClusterControllerService) ccServiceCtx
- .getControllerService();
+ final ClusterControllerService controllerService =
+ (ClusterControllerService) ccServiceCtx.getControllerService();
ccServiceCtx.setMessageBroker(new CCMessageBroker(controllerService));
configureLoggingLevel(ccServiceCtx.getAppConfig().getLoggingLevel(ExternalProperties.Option.LOG_LEVEL));
@@ -137,8 +137,8 @@
hcc = new HyracksConnection(strIP, port);
MetadataBuiltinFunctions.init();
ILibraryManager libraryManager = new ExternalLibraryManager();
- ReplicationProperties repProp = new ReplicationProperties(
- PropertiesAccessor.getInstance(ccServiceCtx.getAppConfig()));
+ ReplicationProperties repProp =
+ new ReplicationProperties(PropertiesAccessor.getInstance(ccServiceCtx.getAppConfig()));
INcLifecycleCoordinator lifecycleCoordinator = createNcLifeCycleCoordinator(repProp.isReplicationEnabled());
ExternalLibraryUtils.setUpExternaLibraries(libraryManager, false);
componentProvider = new StorageComponentProvider();
@@ -225,8 +225,8 @@
}
protected HttpServer setupJSONAPIServer(ExternalProperties externalProperties) throws Exception {
- HttpServer jsonAPIServer = new HttpServer(webManager.getBosses(), webManager.getWorkers(),
- externalProperties.getAPIServerPort());
+ HttpServer jsonAPIServer =
+ new HttpServer(webManager.getBosses(), webManager.getWorkers(), externalProperties.getAPIServerPort());
jsonAPIServer.setAttribute(HYRACKS_CONNECTION_ATTR, hcc);
jsonAPIServer.setAttribute(ASTERIX_APP_CONTEXT_INFO_ATTR, appCtx);
jsonAPIServer.setAttribute(ServletConstants.EXECUTOR_SERVICE_ATTR,
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
index 932f47c..22c89c5 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ClusterLifecycleListener.java
@@ -136,8 +136,8 @@
case REMOVE_NODE:
nodesToRemove.addAll(((RemoveNodeWork) w).getNodesToBeRemoved());
nodeRemovalRequests.add(w);
- RemoveNodeWorkResponse response = new RemoveNodeWorkResponse((RemoveNodeWork) w,
- Status.IN_PROGRESS);
+ RemoveNodeWorkResponse response =
+ new RemoveNodeWorkResponse((RemoveNodeWork) w, Status.IN_PROGRESS);
pendingWorkResponses.add(response);
break;
}
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
index 8e753c8..e2f1eaf 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
@@ -144,8 +144,8 @@
if (!dataverse.getDataverseName().equals(MetadataConstants.METADATA_DATAVERSE_NAME)) {
MetadataProvider metadataProvider = new MetadataProvider(appCtx, dataverse);
try {
- List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx,
- dataverse.getDataverseName());
+ List<Dataset> datasets =
+ MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverse.getDataverseName());
for (Dataset dataset : datasets) {
if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
// External dataset
@@ -157,8 +157,8 @@
TransactionState datasetState = dsd.getState();
if (!indexes.isEmpty()) {
if (datasetState == TransactionState.BEGIN) {
- List<ExternalFile> files = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx,
- dataset);
+ List<ExternalFile> files =
+ MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, dataset);
// if persumed abort, roll backward
// 1. delete all pending files
for (ExternalFile file : files) {
@@ -169,8 +169,8 @@
}
// 2. clean artifacts in NCs
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- JobSpecification jobSpec = ExternalIndexingOperations.buildAbortOp(dataset, indexes,
- metadataProvider);
+ JobSpecification jobSpec =
+ ExternalIndexingOperations.buildAbortOp(dataset, indexes, metadataProvider);
executeHyracksJob(jobSpec);
// 3. correct the dataset state
((ExternalDatasetDetails) dataset.getDatasetDetails()).setState(TransactionState.COMMIT);
@@ -178,13 +178,13 @@
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
} else if (datasetState == TransactionState.READY_TO_COMMIT) {
- List<ExternalFile> files = MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx,
- dataset);
+ List<ExternalFile> files =
+ MetadataManager.INSTANCE.getDatasetExternalFiles(mdTxnCtx, dataset);
// if ready to commit, roll forward
// 1. commit indexes in NCs
metadataProvider.setMetadataTxnContext(mdTxnCtx);
- JobSpecification jobSpec = ExternalIndexingOperations.buildRecoverOp(dataset, indexes,
- metadataProvider);
+ JobSpecification jobSpec =
+ ExternalIndexingOperations.buildRecoverOp(dataset, indexes, metadataProvider);
executeHyracksJob(jobSpec);
// 2. add pending files in metadata
for (ExternalFile file : files) {
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
index 988c7bb..22458d3 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/messaging/NCMessageBroker.java
@@ -59,8 +59,8 @@
appContext = (INcApplicationContext) ncs.getApplicationContext();
maxMsgSize = messagingProperties.getFrameSize();
int messagingMemoryBudget = messagingProperties.getFrameSize() * messagingProperties.getFrameCount();
- messagingFramePool = new ConcurrentFramePool(ncs.getId(), messagingMemoryBudget,
- messagingProperties.getFrameSize());
+ messagingFramePool =
+ new ConcurrentFramePool(ncs.getId(), messagingMemoryBudget, messagingProperties.getFrameSize());
receivedMsgsQ = new LinkedBlockingQueue<>();
futureIdGenerator = new AtomicLong();
futureMap = new LongObjectHashMap<>();
@@ -79,8 +79,7 @@
}
@Override
- public void sendMessageToNC(String nodeId, INcAddressedMessage message)
- throws Exception {
+ public void sendMessageToNC(String nodeId, INcAddressedMessage message) throws Exception {
IChannelControlBlock messagingChannel = ncs.getMessagingNetworkManager().getMessagingChannel(nodeId);
sendMessageToChannel(messagingChannel, message);
}
@@ -161,8 +160,7 @@
Thread.currentThread().interrupt();
} catch (Exception e) {
if (LOGGER.isWarnEnabled() && msg != null) {
- LOGGER.log(Level.WARN, "Could not process message : "
- + msg, e);
+ LOGGER.log(Level.WARN, "Could not process message : " + msg, e);
} else {
if (LOGGER.isWarnEnabled()) {
LOGGER.log(Level.WARN, "Could not process message", e);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/DataverseUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/DataverseUtil.java
index 73c841e..f470949 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/DataverseUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/DataverseUtil.java
@@ -35,8 +35,8 @@
public static JobSpecification dropDataverseJobSpec(Dataverse dataverse, MetadataProvider metadata) {
JobSpecification jobSpec = RuntimeUtils.createJobSpecification(metadata.getApplicationContext());
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadata
- .splitAndConstraints(dataverse.getDataverseName());
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
+ metadata.splitAndConstraints(dataverse.getDataverseName());
FileRemoveOperatorDescriptor frod = new FileRemoveOperatorDescriptor(jobSpec, splitsAndConstraint.first, false);
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(jobSpec, frod, splitsAndConstraint.second);
jobSpec.addRoot(frod);
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
index 186cc94..b6371dc 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FeedOperations.java
@@ -198,10 +198,9 @@
VariableExpr fromTermLeftExpr = new VariableExpr(fromVarId);
// TODO: remove target feedid from args list (xikui)
// TODO: Get rid of this INTAKE
- List<Expression> exprList =
- addArgs(feedConnection.getDataverseName(), feedConnection.getFeedId().getEntityName(),
- feedConnection.getFeedId().getEntityName(), FeedRuntimeType.INTAKE.toString(),
- feedConnection.getDatasetName(), feedConnection.getOutputType());
+ List<Expression> exprList = addArgs(feedConnection.getDataverseName(),
+ feedConnection.getFeedId().getEntityName(), feedConnection.getFeedId().getEntityName(),
+ FeedRuntimeType.INTAKE.toString(), feedConnection.getDatasetName(), feedConnection.getOutputType());
CallExpr datasrouceCallFunction = new CallExpr(new FunctionSignature(BuiltinFunctions.FEED_COLLECT), exprList);
FromTerm fromterm = new FromTerm(datasrouceCallFunction, fromTermLeftExpr, null, null);
FromClause fromClause = new FromClause(Arrays.asList(fromterm));
@@ -290,9 +289,9 @@
String datasetName = feedConnections.get(iter1).getDatasetName();
FeedConnectionId feedConnectionId = new FeedConnectionId(ingestionOp.getEntityId(), datasetName);
- FeedPolicyEntity feedPolicyEntity = FeedMetadataUtil
- .validateIfPolicyExists(curFeedConnection.getDataverseName(), curFeedConnection.getPolicyName(),
- metadataProvider.getMetadataTxnContext());
+ FeedPolicyEntity feedPolicyEntity =
+ FeedMetadataUtil.validateIfPolicyExists(curFeedConnection.getDataverseName(),
+ curFeedConnection.getPolicyName(), metadataProvider.getMetadataTxnContext());
for (Map.Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operatorsMap.entrySet()) {
IOperatorDescriptor opDesc = entry.getValue();
@@ -344,8 +343,8 @@
});
// make connections between operators
- for (Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>,
- Pair<IOperatorDescriptor, Integer>>> entry : subJob.getConnectorOperatorMap().entrySet()) {
+ for (Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> entry : subJob
+ .getConnectorOperatorMap().entrySet()) {
ConnectorDescriptorId newId = connectorIdMapping.get(entry.getKey());
IConnectorDescriptor connDesc = jobSpec.getConnectorMap().get(newId);
Pair<IOperatorDescriptor, Integer> leftOp = entry.getValue().getLeft();
@@ -423,8 +422,7 @@
}
// jobEventListenerFactory
- jobSpec.setJobletEventListenerFactory(
- new MultiTransactionJobletEventListenerFactory(txnIdMap, true));
+ jobSpec.setJobletEventListenerFactory(new MultiTransactionJobletEventListenerFactory(txnIdMap, true));
// useConnectorSchedulingPolicy
jobSpec.setUseConnectorPolicyForScheduling(jobsList.get(0).isUseConnectorPolicyForScheduling());
// connectorAssignmentPolicy
@@ -436,9 +434,8 @@
SessionOutput sessionOutput) {
List<Statement> stmts = new ArrayList<>();
DefaultStatementExecutorFactory qtFactory = new DefaultStatementExecutorFactory();
- IStatementExecutor translator = qtFactory
- .create(metadataProvider.getApplicationContext(), stmts, sessionOutput, new SqlppCompilationProvider(),
- new StorageComponentProvider());
+ IStatementExecutor translator = qtFactory.create(metadataProvider.getApplicationContext(), stmts, sessionOutput,
+ new SqlppCompilationProvider(), new StorageComponentProvider());
return translator;
}
@@ -456,15 +453,15 @@
String[] ingestionLocations = ingestionAdaptorFactory.getPartitionConstraint().getLocations();
// Add metadata configs
metadataProvider.getConfig().put(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS, Boolean.TRUE.toString());
- metadataProvider.getConfig()
- .put(FeedActivityDetails.COLLECT_LOCATIONS, StringUtils.join(ingestionLocations, ','));
+ metadataProvider.getConfig().put(FeedActivityDetails.COLLECT_LOCATIONS,
+ StringUtils.join(ingestionLocations, ','));
// TODO: Once we deprecated AQL, this extra queryTranslator can be removed.
IStatementExecutor translator =
getSQLPPTranslator(metadataProvider, ((QueryTranslator) statementExecutor).getSessionOutput());
// Add connection job
for (FeedConnection feedConnection : feedConnections) {
- JobSpecification connectionJob = getConnectionJob(metadataProvider, feedConnection, translator, hcc,
- insertFeed);
+ JobSpecification connectionJob =
+ getConnectionJob(metadataProvider, feedConnection, translator, hcc, insertFeed);
jobsList.add(connectionJob);
}
return Pair.of(combineIntakeCollectJobs(metadataProvider, feed, intakeJob, jobsList, feedConnections,
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
index 4137fbd..f446c4b 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/FlushDatasetUtil.java
@@ -61,13 +61,13 @@
new IPushRuntimeFactory[] { new EmptyTupleSourceRuntimeFactory() }, rDescs);
TxnId txnId = TxnIdFactory.create();
- FlushDatasetOperatorDescriptor flushOperator = new FlushDatasetOperatorDescriptor(spec, txnId,
- dataset.getDatasetId());
+ FlushDatasetOperatorDescriptor flushOperator =
+ new FlushDatasetOperatorDescriptor(spec, txnId, dataset.getDatasetId());
spec.connect(new OneToOneConnectorDescriptor(spec), emptySource, 0, flushOperator, 0);
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> primarySplitsAndConstraint = metadataProvider
- .getSplitProviderAndConstraints(dataset, dataset.getDatasetName());
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> primarySplitsAndConstraint =
+ metadataProvider.getSplitProviderAndConstraints(dataset, dataset.getDatasetName());
AlgebricksPartitionConstraint primaryPartitionConstraint = primarySplitsAndConstraint.second;
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, emptySource,
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
index 16ffd40..80275a5 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/RebalanceUtil.java
@@ -119,9 +119,9 @@
if (!targetNcNames.isEmpty()) {
// Creates a node group for rebalance.
- String nodeGroupName = DatasetUtil
- .createNodeGroupForNewDataset(sourceDataset.getDataverseName(), sourceDataset.getDatasetName(),
- sourceDataset.getRebalanceCount() + 1, targetNcNames, metadataProvider);
+ String nodeGroupName = DatasetUtil.createNodeGroupForNewDataset(sourceDataset.getDataverseName(),
+ sourceDataset.getDatasetName(), sourceDataset.getRebalanceCount() + 1, targetNcNames,
+ metadataProvider);
// The target dataset for rebalance.
targetDataset = sourceDataset.getTargetDatasetForRebalance(nodeGroupName);
@@ -338,8 +338,7 @@
Dataset target) throws AlgebricksException {
int[] primaryKeyFields = getPrimaryKeyPermutationForUpsert(target);
return new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
- new IPushRuntimeFactory[] {
- target.getCommitRuntimeFactory(metadataProvider, primaryKeyFields, true) },
+ new IPushRuntimeFactory[] { target.getCommitRuntimeFactory(metadataProvider, primaryKeyFields, true) },
new RecordDescriptor[] { target.getPrimaryRecordDescriptor(metadataProvider) });
}
@@ -405,9 +404,9 @@
dropDatasetFiles(dataset, metadataProvider, hcc);
// drop dataset entry from metadata
- runMetadataTransaction(metadataProvider, () -> MetadataManager.INSTANCE
- .dropDataset(metadataProvider.getMetadataTxnContext(), dataset.getDataverseName(),
- dataset.getDatasetName()));
+ runMetadataTransaction(metadataProvider,
+ () -> MetadataManager.INSTANCE.dropDataset(metadataProvider.getMetadataTxnContext(),
+ dataset.getDataverseName(), dataset.getDatasetName()));
MetadataManager.INSTANCE.commitTransaction(metadataProvider.getMetadataTxnContext());
// try to drop the dataset's node group
runMetadataTransaction(metadataProvider, () -> tryDropDatasetNodegroup(dataset, metadataProvider));
diff --git a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/ResourceUtils.java b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/ResourceUtils.java
index 1763a98..89c4c76 100644
--- a/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/ResourceUtils.java
+++ b/asterixdb/asterix-app/src/main/java/org/apache/asterix/utils/ResourceUtils.java
@@ -54,8 +54,7 @@
*/
public static IClusterCapacity getRequiredCapacity(ILogicalPlan plan,
AlgebricksAbsolutePartitionConstraint computationLocations, int sortFrameLimit, int groupFrameLimit,
- int joinFrameLimit, int frameSize)
- throws AlgebricksException {
+ int joinFrameLimit, int frameSize) throws AlgebricksException {
// Creates a cluster capacity visitor.
IClusterCapacity clusterCapacity = new ClusterCapacity();
RequiredCapacityVisitor visitor = new RequiredCapacityVisitor(computationLocations.getLocations().length,
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/common/APIFrameworkTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/common/APIFrameworkTest.java
index 90fc646..2d3de1c 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/common/APIFrameworkTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/common/APIFrameworkTest.java
@@ -67,10 +67,10 @@
APIFramework apiFramework = new APIFramework(mock(ILangCompilationProvider.class));
// Tests large storage locations.
- AlgebricksAbsolutePartitionConstraint storageLocations = new AlgebricksAbsolutePartitionConstraint(
- new String[] { "node1", "node1", "node2" });
- AlgebricksAbsolutePartitionConstraint computationLocations = (AlgebricksAbsolutePartitionConstraint) PA
- .invokeMethod(apiFramework,
+ AlgebricksAbsolutePartitionConstraint storageLocations =
+ new AlgebricksAbsolutePartitionConstraint(new String[] { "node1", "node1", "node2" });
+ AlgebricksAbsolutePartitionConstraint computationLocations =
+ (AlgebricksAbsolutePartitionConstraint) PA.invokeMethod(apiFramework,
"chooseLocations(" + IClusterInfoCollector.class.getName() + ",int,"
+ AlgebricksAbsolutePartitionConstraint.class.getName() + ")",
clusterInfoCollector, CompilerProperties.COMPILER_PARALLELISM_AS_STORAGE, storageLocations);
@@ -114,8 +114,8 @@
APIFramework apiFramework = new APIFramework(mock(ILangCompilationProvider.class));
// Tests odd number parallelism.
- AlgebricksAbsolutePartitionConstraint loc = (AlgebricksAbsolutePartitionConstraint) PA.invokeMethod(
- apiFramework, "getComputationLocations(java.util.Map,int)", map, 5);
+ AlgebricksAbsolutePartitionConstraint loc = (AlgebricksAbsolutePartitionConstraint) PA
+ .invokeMethod(apiFramework, "getComputationLocations(java.util.Map,int)", map, 5);
int nc1Count = 0, nc2Count = 0;
String[] partitions = loc.getLocations();
for (String partition : partitions) {
@@ -194,8 +194,8 @@
jobSpec.getUserConstraints().add(new Constraint(lValueMock, nc1Location));
final String[] clusterLocation = new String[] { nc1, nc2 };
- final AlgebricksAbsolutePartitionConstraint jobLocations = APIFramework
- .getJobLocations(jobSpec, nodeJobTracker, new AlgebricksAbsolutePartitionConstraint(clusterLocation));
+ final AlgebricksAbsolutePartitionConstraint jobLocations = APIFramework.getJobLocations(jobSpec, nodeJobTracker,
+ new AlgebricksAbsolutePartitionConstraint(clusterLocation));
// ensure nc2 wasn't included
Assert.assertEquals(1, jobLocations.getLocations().length);
Assert.assertEquals(nc1, jobLocations.getLocations()[0]);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
index 97079eb..ae82ea7 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/common/AsterixHyracksIntegrationUtil.java
@@ -65,8 +65,8 @@
public static final int DEFAULT_HYRACKS_CC_CLIENT_PORT = 1098;
public static final int DEFAULT_HYRACKS_CC_CLUSTER_PORT = 1099;
- public static final String DEFAULT_CONF_FILE = joinPath(getProjectPath().toString(), "src", "test", "resources",
- "cc.conf");
+ public static final String DEFAULT_CONF_FILE =
+ joinPath(getProjectPath().toString(), "src", "test", "resources", "cc.conf");
private static final String DEFAULT_STORAGE_PATH = joinPath("target", "io", "dir");
private static String storagePath = DEFAULT_STORAGE_PATH;
@@ -135,8 +135,8 @@
ncConfigManager = new ConfigManager(new String[] { "-config-file", confFile });
}
ncApplication.registerConfig(ncConfigManager);
- nodeControllers.add(new NodeControllerService(fixupIODevices(createNCConfig(nodeId, ncConfigManager)),
- ncApplication));
+ nodeControllers.add(
+ new NodeControllerService(fixupIODevices(createNCConfig(nodeId, ncConfigManager)), ncApplication));
}
opts.stream().forEach(opt -> configManager.set(opt.getLeft(), opt.getRight()));
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
index 87ee2ef..06d92b7 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorApiServletTest.java
@@ -152,7 +152,8 @@
nodeMap.put("asterix_nc2", mockInfo2);
PA.invokeMethod(let,
"formResponseObject(" + ObjectNode.class.getName() + ", " + FileSplit.class.getName() + "[], "
- + ARecordType.class.getName() + ", " + String.class.getName() + ", " + Map.class.getName() + ")",
+ + ARecordType.class.getName() + ", " + String.class.getName() + ", " + Map.class.getName()
+ + ")",
actualResponse, splits, recordType, primaryKey, nodeMap);
// Constructs expected response.
ObjectNode expectedResponse = om.createObjectNode();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/QueryCancellationServletTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/QueryCancellationServletTest.java
index 3cb46fe..d5262cf 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/QueryCancellationServletTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/QueryCancellationServletTest.java
@@ -47,8 +47,8 @@
@Test
public void testDelete() throws Exception {
// Creates a query cancellation servlet.
- QueryCancellationServlet cancellationServlet = new QueryCancellationServlet(new ConcurrentHashMap<>(),
- new String[] { "/" });
+ QueryCancellationServlet cancellationServlet =
+ new QueryCancellationServlet(new ConcurrentHashMap<>(), new String[] { "/" });
// Adds mocked Hyracks client connection into the servlet context.
IHyracksClientConnection mockHcc = mock(IHyracksClientConnection.class);
cancellationServlet.ctx().put(ServletConstants.HYRACKS_CONNECTION_ATTR, mockHcc);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java
index e583c75..976dbc3 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/VersionApiServletTest.java
@@ -62,7 +62,6 @@
SqlppExecutionTest.tearDown();
}
-
@Test
public void testGet() throws Exception {
// Configures a test version api servlet.
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
index 23a3eda..69ce2ea 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/app/bootstrap/TestNodeController.java
@@ -178,29 +178,30 @@
int[] primaryKeyIndexes, List<Integer> primaryKeyIndicators,
StorageComponentProvider storageComponentProvider, Index secondaryIndex)
throws AlgebricksException, HyracksDataException, RemoteException, ACIDException {
- CcApplicationContext appCtx = (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc
- .getApplicationContext();
+ CcApplicationContext appCtx =
+ (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext();
MetadataProvider mdProvider = new MetadataProvider(appCtx, null);
try {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy = DatasetUtil
- .getMergePolicyFactory(dataset, mdTxnCtx);
+ org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy =
+ DatasetUtil.getMergePolicyFactory(dataset, mdTxnCtx);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
PrimaryIndexInfo primaryIndexInfo = new PrimaryIndexInfo(dataset, primaryKeyTypes, recordType, metaType,
mergePolicy.first, mergePolicy.second, filterFields, primaryKeyIndexes, primaryKeyIndicators);
IndexOperation op = IndexOperation.INSERT;
- IModificationOperationCallbackFactory modOpCallbackFactory = new PrimaryIndexModificationOperationCallbackFactory(
- dataset.getDatasetId(), primaryIndexInfo.primaryKeyIndexes, TXN_SUBSYSTEM_PROVIDER,
- Operation.get(op), ResourceType.LSM_BTREE);
+ IModificationOperationCallbackFactory modOpCallbackFactory =
+ new PrimaryIndexModificationOperationCallbackFactory(dataset.getDatasetId(),
+ primaryIndexInfo.primaryKeyIndexes, TXN_SUBSYSTEM_PROVIDER, Operation.get(op),
+ ResourceType.LSM_BTREE);
IRecordDescriptorProvider recordDescProvider = primaryIndexInfo.getInsertRecordDescriptorProvider();
- RecordDescriptor recordDesc = recordDescProvider
- .getInputRecordDescriptor(new ActivityId(new OperatorDescriptorId(0), 0), 0);
+ RecordDescriptor recordDesc =
+ recordDescProvider.getInputRecordDescriptor(new ActivityId(new OperatorDescriptorId(0), 0), 0);
IIndexDataflowHelperFactory indexHelperFactory = new IndexDataflowHelperFactory(
storageComponentProvider.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
- LSMInsertDeleteOperatorNodePushable insertOp = new LSMInsertDeleteOperatorNodePushable(ctx,
- ctx.getTaskAttemptId().getTaskId().getPartition(),
- primaryIndexInfo.primaryIndexInsertFieldsPermutations, recordDesc, op, true, indexHelperFactory,
- modOpCallbackFactory, null);
+ LSMInsertDeleteOperatorNodePushable insertOp =
+ new LSMInsertDeleteOperatorNodePushable(ctx, ctx.getTaskAttemptId().getTaskId().getPartition(),
+ primaryIndexInfo.primaryIndexInsertFieldsPermutations, recordDesc, op, true,
+ indexHelperFactory, modOpCallbackFactory, null);
// For now, this assumes a single secondary index. recordDesc is always <pk-record-meta>
// for the index, we will have to create an assign operator that extract the sk
@@ -208,8 +209,8 @@
if (secondaryIndex != null) {
List<List<String>> skNames = secondaryIndex.getKeyFieldNames();
List<Integer> indicators = secondaryIndex.getKeyFieldSourceIndicators();
- IScalarEvaluatorFactory[] secondaryFieldAccessEvalFactories = new IScalarEvaluatorFactory[skNames
- .size()];
+ IScalarEvaluatorFactory[] secondaryFieldAccessEvalFactories =
+ new IScalarEvaluatorFactory[skNames.size()];
for (int i = 0; i < skNames.size(); i++) {
ARecordType sourceType = dataset.hasMetaPart()
? indicators.get(i).intValue() == Index.RECORD_INDICATOR ? recordType : metaType
@@ -232,17 +233,18 @@
for (int i = 0; i < primaryIndexInfo.index.getKeyFieldNames().size(); i++) {
projectionList[projCount++] = i;
}
- IPushRuntime assignOp = new AssignRuntimeFactory(outColumns, secondaryFieldAccessEvalFactories,
- projectionList, true).createPushRuntime(ctx);
+ IPushRuntime assignOp =
+ new AssignRuntimeFactory(outColumns, secondaryFieldAccessEvalFactories, projectionList, true)
+ .createPushRuntime(ctx);
insertOp.setOutputFrameWriter(0, assignOp, primaryIndexInfo.rDesc);
assignOp.setInputRecordDescriptor(0, primaryIndexInfo.rDesc);
SecondaryIndexInfo secondaryIndexInfo = new SecondaryIndexInfo(primaryIndexInfo, secondaryIndex);
IIndexDataflowHelperFactory secondaryIndexHelperFactory = new IndexDataflowHelperFactory(
storageComponentProvider.getStorageManager(), secondaryIndexInfo.fileSplitProvider);
- LSMInsertDeleteOperatorNodePushable secondaryInsertOp = new LSMInsertDeleteOperatorNodePushable(ctx,
- ctx.getTaskAttemptId().getTaskId().getPartition(), secondaryIndexInfo.insertFieldsPermutations,
- secondaryIndexInfo.rDesc, op, false, secondaryIndexHelperFactory,
- NoOpOperationCallbackFactory.INSTANCE, null);
+ LSMInsertDeleteOperatorNodePushable secondaryInsertOp =
+ new LSMInsertDeleteOperatorNodePushable(ctx, ctx.getTaskAttemptId().getTaskId().getPartition(),
+ secondaryIndexInfo.insertFieldsPermutations, secondaryIndexInfo.rDesc, op, false,
+ secondaryIndexHelperFactory, NoOpOperationCallbackFactory.INSTANCE, null);
assignOp.setOutputFrameWriter(0, secondaryInsertOp, secondaryIndexInfo.rDesc);
CommitRuntime commitOp = new CommitRuntime(ctx, getTxnJobId(ctx), dataset.getDatasetId(),
secondaryIndexInfo.primaryKeyIndexes, true, ctx.getTaskAttemptId().getTaskId().getPartition(),
@@ -277,9 +279,9 @@
BTreeSearchOperatorDescriptor searchOpDesc = new BTreeSearchOperatorDescriptor(spec, primaryIndexInfo.rDesc,
null, null, true, true, indexDataflowHelperFactory, false, false, null,
NoOpOperationCallbackFactory.INSTANCE, filterFields, filterFields, false);
- BTreeSearchOperatorNodePushable searchOp = searchOpDesc.createPushRuntime(ctx,
- primaryIndexInfo.getSearchRecordDescriptorProvider(), ctx.getTaskAttemptId().getTaskId().getPartition(),
- 1);
+ BTreeSearchOperatorNodePushable searchOp =
+ searchOpDesc.createPushRuntime(ctx, primaryIndexInfo.getSearchRecordDescriptorProvider(),
+ ctx.getTaskAttemptId().getTaskId().getPartition(), 1);
emptyTupleOp.setOutputFrameWriter(0, searchOp,
primaryIndexInfo.getSearchRecordDescriptorProvider().getInputRecordDescriptor(null, 0));
searchOp.setOutputFrameWriter(0, countOp, primaryIndexInfo.rDesc);
@@ -299,8 +301,8 @@
Dataverse dataverse = new Dataverse(dataset.getDataverseName(), NonTaggedDataFormat.class.getName(),
MetadataUtil.PENDING_NO_OP);
Index index = primaryIndexInfo.getIndex();
- CcApplicationContext appCtx = (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc
- .getApplicationContext();
+ CcApplicationContext appCtx =
+ (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext();
MetadataProvider mdProvider = new MetadataProvider(appCtx, dataverse);
try {
return dataset.getResourceFactory(mdProvider, index, primaryIndexInfo.recordType, primaryIndexInfo.metaType,
@@ -315,8 +317,8 @@
int[] primaryKeyIndexes, List<Integer> primaryKeyIndicators, int partition)
throws AlgebricksException, HyracksDataException, RemoteException, ACIDException {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy = DatasetUtil
- .getMergePolicyFactory(dataset, mdTxnCtx);
+ org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy =
+ DatasetUtil.getMergePolicyFactory(dataset, mdTxnCtx);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
PrimaryIndexInfo primaryIndexInfo = new PrimaryIndexInfo(dataset, primaryKeyTypes, recordType, metaType,
mergePolicy.first, mergePolicy.second, filterFields, primaryKeyIndexes, primaryKeyIndicators);
@@ -327,9 +329,9 @@
try {
IResourceFactory resourceFactory = dataset.getResourceFactory(mdProvider, primaryIndexInfo.index,
recordType, metaType, mergePolicy.first, mergePolicy.second);
- IndexBuilderFactory indexBuilderFactory = new IndexBuilderFactory(
- storageComponentProvider.getStorageManager(), primaryIndexInfo.getFileSplitProvider(),
- resourceFactory, true);
+ IndexBuilderFactory indexBuilderFactory =
+ new IndexBuilderFactory(storageComponentProvider.getStorageManager(),
+ primaryIndexInfo.getFileSplitProvider(), resourceFactory, true);
IHyracksTaskContext ctx = createTestContext(newJobId(), partition, false);
IIndexBuilder indexBuilder = indexBuilderFactory.create(ctx, partition);
indexBuilder.build();
@@ -343,8 +345,8 @@
IStorageComponentProvider storageComponentProvider, int partition)
throws AlgebricksException, HyracksDataException, RemoteException, ACIDException {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy = DatasetUtil
- .getMergePolicyFactory(primaryIndexInfo.dataset, mdTxnCtx);
+ org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy =
+ DatasetUtil.getMergePolicyFactory(primaryIndexInfo.dataset, mdTxnCtx);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
Dataverse dataverse = new Dataverse(primaryIndexInfo.dataset.getDataverseName(),
NonTaggedDataFormat.class.getName(), MetadataUtil.PENDING_NO_OP);
@@ -355,9 +357,9 @@
IResourceFactory resourceFactory = primaryIndexInfo.dataset.getResourceFactory(mdProvider, secondaryIndex,
primaryIndexInfo.recordType, primaryIndexInfo.metaType, mergePolicy.first, mergePolicy.second);
- IndexBuilderFactory indexBuilderFactory = new IndexBuilderFactory(
- storageComponentProvider.getStorageManager(), secondaryIndexInfo.fileSplitProvider, resourceFactory,
- true);
+ IndexBuilderFactory indexBuilderFactory =
+ new IndexBuilderFactory(storageComponentProvider.getStorageManager(),
+ secondaryIndexInfo.fileSplitProvider, resourceFactory, true);
IHyracksTaskContext ctx = createTestContext(newJobId(), partition, false);
IIndexBuilder indexBuilder = indexBuilderFactory.create(ctx, partition);
indexBuilder.build();
@@ -372,8 +374,8 @@
int i = 0;
ISerializerDeserializer<?>[] primaryIndexSerdes = new ISerializerDeserializer<?>[primaryIndexNumOfTupleFields];
for (; i < primaryKeyTypes.length; i++) {
- primaryIndexSerdes[i] = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(primaryKeyTypes[i]);
+ primaryIndexSerdes[i] =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(primaryKeyTypes[i]);
}
primaryIndexSerdes[i++] = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(recordType);
if (metaType != null) {
@@ -384,16 +386,16 @@
public static ISerializerDeserializer<?>[] createSecondaryIndexSerdes(ARecordType recordType, ARecordType metaType,
IAType[] primaryKeyTypes, IAType[] secondaryKeyTypes) {
- ISerializerDeserializer<?>[] secondaryIndexSerdes = new ISerializerDeserializer<?>[secondaryKeyTypes.length
- + primaryKeyTypes.length];
+ ISerializerDeserializer<?>[] secondaryIndexSerdes =
+ new ISerializerDeserializer<?>[secondaryKeyTypes.length + primaryKeyTypes.length];
int i = 0;
for (; i < secondaryKeyTypes.length; i++) {
- secondaryIndexSerdes[i] = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(secondaryKeyTypes[i]);
+ secondaryIndexSerdes[i] =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(secondaryKeyTypes[i]);
}
for (; i < primaryKeyTypes.length; i++) {
- secondaryIndexSerdes[i] = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(primaryKeyTypes[i]);
+ secondaryIndexSerdes[i] =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(primaryKeyTypes[i]);
}
return secondaryIndexSerdes;
}
@@ -439,8 +441,8 @@
ctx = Mockito.spy(ctx);
Mockito.when(ctx.getJobletContext()).thenReturn(jobletCtx);
Mockito.when(ctx.getIoManager()).thenReturn(ExecutionTestUtil.integrationUtil.ncs[0].getIoManager());
- TaskAttemptId taskId = new TaskAttemptId(new TaskId(new ActivityId(new OperatorDescriptorId(0), 0), partition),
- 0);
+ TaskAttemptId taskId =
+ new TaskAttemptId(new TaskId(new ActivityId(new OperatorDescriptorId(0), 0), partition), 0);
Mockito.when(ctx.getTaskAttemptId()).thenReturn(taskId);
return ctx;
}
@@ -474,8 +476,8 @@
this.primaryIndexInfo = primaryIndexInfo;
this.secondaryIndex = secondaryIndex;
List<String> nodes = Collections.singletonList(ExecutionTestUtil.integrationUtil.ncs[0].getId());
- CcApplicationContext appCtx = (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc
- .getApplicationContext();
+ CcApplicationContext appCtx =
+ (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext();
FileSplit[] splits = SplitsAndConstraintsUtil.getIndexSplits(appCtx.getClusterStateManager(),
primaryIndexInfo.dataset, secondaryIndex.getIndexName(), nodes);
fileSplitProvider = new ConstantFileSplitProvider(splits);
@@ -529,10 +531,10 @@
this.mergePolicyProperties = mergePolicyProperties;
this.primaryKeyIndexes = primaryKeyIndexes;
primaryIndexNumOfTupleFields = primaryKeyTypes.length + (1 + ((metaType == null) ? 0 : 1));
- primaryIndexTypeTraits = createPrimaryIndexTypeTraits(primaryIndexNumOfTupleFields, primaryKeyTypes,
- recordType, metaType);
- primaryIndexSerdes = createPrimaryIndexSerdes(primaryIndexNumOfTupleFields, primaryKeyTypes, recordType,
- metaType);
+ primaryIndexTypeTraits =
+ createPrimaryIndexTypeTraits(primaryIndexNumOfTupleFields, primaryKeyTypes, recordType, metaType);
+ primaryIndexSerdes =
+ createPrimaryIndexSerdes(primaryIndexNumOfTupleFields, primaryKeyTypes, recordType, metaType);
rDesc = new RecordDescriptor(primaryIndexSerdes, primaryIndexTypeTraits);
primaryIndexInsertFieldsPermutations = new int[primaryIndexNumOfTupleFields];
for (int i = 0; i < primaryIndexNumOfTupleFields; i++) {
@@ -542,16 +544,16 @@
List<IAType> keyFieldTypes = Arrays.asList(primaryKeyTypes);
for (int i = 0; i < primaryKeyIndicators.size(); i++) {
Integer indicator = primaryKeyIndicators.get(i);
- String[] fieldNames = indicator == Index.RECORD_INDICATOR ? recordType.getFieldNames()
- : metaType.getFieldNames();
+ String[] fieldNames =
+ indicator == Index.RECORD_INDICATOR ? recordType.getFieldNames() : metaType.getFieldNames();
keyFieldNames.add(Arrays.asList(fieldNames[primaryKeyIndexes[i]]));
}
index = new Index(dataset.getDataverseName(), dataset.getDatasetName(), dataset.getDatasetName(),
IndexType.BTREE, keyFieldNames, primaryKeyIndicators, keyFieldTypes, false, false, true,
MetadataUtil.PENDING_NO_OP);
List<String> nodes = Collections.singletonList(ExecutionTestUtil.integrationUtil.ncs[0].getId());
- CcApplicationContext appCtx = (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc
- .getApplicationContext();
+ CcApplicationContext appCtx =
+ (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext();
FileSplit[] splits = SplitsAndConstraintsUtil.getIndexSplits(appCtx.getClusterStateManager(), dataset,
index.getIndexName(), nodes);
fileSplitProvider = new ConstantFileSplitProvider(splits);
@@ -572,8 +574,8 @@
ISerializerDeserializer<?>[] primaryKeySerdes = new ISerializerDeserializer<?>[primaryKeyTypes.length];
for (int i = 0; i < primaryKeyTypes.length; i++) {
primaryKeyTypeTraits[i] = TypeTraitProvider.INSTANCE.getTypeTrait(primaryKeyTypes[i]);
- primaryKeySerdes[i] = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(primaryKeyTypes[i]);
+ primaryKeySerdes[i] =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(primaryKeyTypes[i]);
}
RecordDescriptor searcgRecDesc = new RecordDescriptor(primaryKeySerdes, primaryKeyTypeTraits);
IRecordDescriptorProvider rDescProvider = Mockito.mock(IRecordDescriptorProvider.class);
@@ -589,10 +591,10 @@
public RecordDescriptor getSearchOutputDesc(IAType[] keyTypes, ARecordType recordType, ARecordType metaType) {
int primaryIndexNumOfTupleFields = keyTypes.length + (1 + ((metaType == null) ? 0 : 1));
- ITypeTraits[] primaryIndexTypeTraits = createPrimaryIndexTypeTraits(primaryIndexNumOfTupleFields, keyTypes,
- recordType, metaType);
- ISerializerDeserializer<?>[] primaryIndexSerdes = createPrimaryIndexSerdes(primaryIndexNumOfTupleFields,
- keyTypes, recordType, metaType);
+ ITypeTraits[] primaryIndexTypeTraits =
+ createPrimaryIndexTypeTraits(primaryIndexNumOfTupleFields, keyTypes, recordType, metaType);
+ ISerializerDeserializer<?>[] primaryIndexSerdes =
+ createPrimaryIndexSerdes(primaryIndexNumOfTupleFields, keyTypes, recordType, metaType);
return new RecordDescriptor(primaryIndexSerdes, primaryIndexTypeTraits);
}
@@ -603,8 +605,8 @@
}
public IStorageManager getStorageManager() {
- CcApplicationContext appCtx = (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc
- .getApplicationContext();
+ CcApplicationContext appCtx =
+ (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext();
return appCtx.getStorageManager();
}
@@ -613,8 +615,8 @@
int[] keyIndexes, List<Integer> keyIndicators, StorageComponentProvider storageComponentProvider,
IFrameOperationCallbackFactory frameOpCallbackFactory, boolean hasSecondaries) throws Exception {
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
- org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy = DatasetUtil
- .getMergePolicyFactory(dataset, mdTxnCtx);
+ org.apache.hyracks.algebricks.common.utils.Pair<ILSMMergePolicyFactory, Map<String, String>> mergePolicy =
+ DatasetUtil.getMergePolicyFactory(dataset, mdTxnCtx);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
PrimaryIndexInfo primaryIndexInfo = new PrimaryIndexInfo(dataset, keyTypes, recordType, metaType,
mergePolicy.first, mergePolicy.second, filterFields, keyIndexes, keyIndicators);
@@ -625,13 +627,13 @@
IRecordDescriptorProvider recordDescProvider = primaryIndexInfo.getInsertRecordDescriptorProvider();
IIndexDataflowHelperFactory indexHelperFactory = new IndexDataflowHelperFactory(
storageComponentProvider.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
- LSMPrimaryUpsertOperatorNodePushable insertOp = new LSMPrimaryUpsertOperatorNodePushable(ctx,
- ctx.getTaskAttemptId().getTaskId().getPartition(), indexHelperFactory,
- primaryIndexInfo.primaryIndexInsertFieldsPermutations,
- recordDescProvider.getInputRecordDescriptor(new ActivityId(new OperatorDescriptorId(0), 0), 0),
- modificationCallbackFactory, searchCallbackFactory, keyIndexes.length, recordType, -1,
- frameOpCallbackFactory == null ? dataset.getFrameOpCallbackFactory() : frameOpCallbackFactory,
- MissingWriterFactory.INSTANCE, hasSecondaries);
+ LSMPrimaryUpsertOperatorNodePushable insertOp =
+ new LSMPrimaryUpsertOperatorNodePushable(ctx, ctx.getTaskAttemptId().getTaskId().getPartition(),
+ indexHelperFactory, primaryIndexInfo.primaryIndexInsertFieldsPermutations,
+ recordDescProvider.getInputRecordDescriptor(new ActivityId(new OperatorDescriptorId(0), 0), 0),
+ modificationCallbackFactory, searchCallbackFactory, keyIndexes.length, recordType, -1,
+ frameOpCallbackFactory == null ? dataset.getFrameOpCallbackFactory() : frameOpCallbackFactory,
+ MissingWriterFactory.INSTANCE, hasSecondaries);
RecordDescriptor upsertOutRecDesc = getUpsertOutRecDesc(primaryIndexInfo.rDesc, dataset,
filterFields == null ? 0 : filterFields.length, recordType, metaType);
// fix pk fields
@@ -649,8 +651,8 @@
private RecordDescriptor getUpsertOutRecDesc(RecordDescriptor inputRecordDesc, Dataset dataset, int numFilterFields,
ARecordType itemType, ARecordType metaItemType) throws Exception {
- ITypeTraits[] outputTypeTraits = new ITypeTraits[inputRecordDesc.getFieldCount()
- + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
+ ITypeTraits[] outputTypeTraits =
+ new ITypeTraits[inputRecordDesc.getFieldCount() + (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
ISerializerDeserializer<?>[] outputSerDes = new ISerializerDeserializer[inputRecordDesc.getFieldCount()
+ (dataset.hasMetaPart() ? 2 : 1) + numFilterFields];
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
index 6d114c6..ab8969e 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/common/TestDataUtil.java
@@ -80,7 +80,8 @@
fields.forEach((fName, fType) -> stringBuilder.append(fName).append(":").append(fType).append(","));
stringBuilder.deleteCharAt(stringBuilder.length() - 1);
TEST_EXECUTOR.executeSqlppUpdateOrDdl("CREATE TYPE dsType AS {" + stringBuilder + "};", OUTPUT_FORMAT);
- TEST_EXECUTOR.executeSqlppUpdateOrDdl("CREATE DATASET " + dataset + "(dsType) PRIMARY KEY " + PKName + ";", OUTPUT_FORMAT);
+ TEST_EXECUTOR.executeSqlppUpdateOrDdl("CREATE DATASET " + dataset + "(dsType) PRIMARY KEY " + PKName + ";",
+ OUTPUT_FORMAT);
}
/**
@@ -90,7 +91,8 @@
* @throws Exception
*/
public static void createPrimaryIndex(String dataset, String indexName) throws Exception {
- TEST_EXECUTOR.executeSqlppUpdateOrDdl("CREATE PRIMARY INDEX " + indexName + " ON " + dataset + ";", OUTPUT_FORMAT);
+ TEST_EXECUTOR.executeSqlppUpdateOrDdl("CREATE PRIMARY INDEX " + indexName + " ON " + dataset + ";",
+ OUTPUT_FORMAT);
}
/**
@@ -101,7 +103,8 @@
* @throws Exception
*/
public static void createSecondaryBTreeIndex(String dataset, String indexName, String SKName) throws Exception {
- TEST_EXECUTOR.executeSqlppUpdateOrDdl("CREATE INDEX " + indexName + " ON " + dataset + "(" + SKName + ");", OUTPUT_FORMAT);
+ TEST_EXECUTOR.executeSqlppUpdateOrDdl("CREATE INDEX " + indexName + " ON " + dataset + "(" + SKName + ");",
+ OUTPUT_FORMAT);
}
/**
@@ -127,8 +130,8 @@
*/
public static long getDatasetCount(String datasetName) throws Exception {
final String query = "SELECT VALUE COUNT(*) FROM `" + datasetName + "`;";
- final InputStream responseStream = TEST_EXECUTOR
- .executeQueryService(query, TEST_EXECUTOR.getEndpoint(Servlets.QUERY_SERVICE), OUTPUT_FORMAT);
+ final InputStream responseStream = TEST_EXECUTOR.executeQueryService(query,
+ TEST_EXECUTOR.getEndpoint(Servlets.QUERY_SERVICE), OUTPUT_FORMAT);
final ObjectNode response = OBJECT_MAPPER.readValue(responseStream, ObjectNode.class);
final JsonNode result = response.get("results");
// make sure there is a single value in result
@@ -209,8 +212,8 @@
(ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext();
final MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
try {
- return SplitsAndConstraintsUtil
- .getIndexSplits(dataset, dataset.getDatasetName(), mdTxnCtx, ccAppCtx.getClusterStateManager());
+ return SplitsAndConstraintsUtil.getIndexSplits(dataset, dataset.getDatasetName(), mdTxnCtx,
+ ccAppCtx.getClusterStateManager());
} finally {
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/runtime/ClusterStateManagerTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/runtime/ClusterStateManagerTest.java
index ca314aa..4dd7463 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/runtime/ClusterStateManagerTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/runtime/ClusterStateManagerTest.java
@@ -207,7 +207,8 @@
Mockito.when(iccServiceContext.getAppConfig()).thenReturn(applicationConfig);
Mockito.when(ccApplicationContext.getServiceContext()).thenReturn(iccServiceContext);
- NcLifecycleCoordinator coordinator = new NcLifecycleCoordinator(ccApplicationContext.getServiceContext(), false);
+ NcLifecycleCoordinator coordinator =
+ new NcLifecycleCoordinator(ccApplicationContext.getServiceContext(), false);
coordinator.bindTo(csm);
Mockito.when(ccApplicationContext.getNcLifecycleCoordinator()).thenReturn(coordinator);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/runtime/ExceptionIT.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/runtime/ExceptionIT.java
index 4bcee04..baaf546 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/runtime/ExceptionIT.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/runtime/ExceptionIT.java
@@ -60,8 +60,8 @@
}
private void testFunction(IFunctionDescriptorFactory funcFactory) throws Exception {
- AbstractScalarFunctionDynamicDescriptor funcDesc = (AbstractScalarFunctionDynamicDescriptor) funcFactory
- .createFunctionDescriptor();
+ AbstractScalarFunctionDynamicDescriptor funcDesc =
+ (AbstractScalarFunctionDynamicDescriptor) funcFactory.createFunctionDescriptor();
int inputArity = funcDesc.getIdentifier().getArity();
Iterator<IScalarEvaluatorFactory[]> argEvalFactoryIterator = getArgCombinations(inputArity);
while (argEvalFactoryIterator.hasNext()) {
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveStatsTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveStatsTest.java
index df7756b..dfb92b7 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveStatsTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/active/ActiveStatsTest.java
@@ -71,16 +71,16 @@
public void refreshStatsTest() throws Exception {
// Entities to be used
EntityId entityId = new EntityId("MockExtension", "MockDataverse", "MockEntity");
- ActiveRuntimeId activeRuntimeId = new ActiveRuntimeId(entityId,
- FeedIntakeOperatorNodePushable.class.getSimpleName(), 0);
+ ActiveRuntimeId activeRuntimeId =
+ new ActiveRuntimeId(entityId, FeedIntakeOperatorNodePushable.class.getSimpleName(), 0);
List<Dataset> datasetList = new ArrayList<>();
- AlgebricksAbsolutePartitionConstraint partitionConstraint = new AlgebricksAbsolutePartitionConstraint(
- new String[] { "asterix_nc1" });
+ AlgebricksAbsolutePartitionConstraint partitionConstraint =
+ new AlgebricksAbsolutePartitionConstraint(new String[] { "asterix_nc1" });
String requestedStats;
- CcApplicationContext appCtx = (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc
- .getApplicationContext();
- ActiveNotificationHandler activeJobNotificationHandler = (ActiveNotificationHandler) appCtx
- .getActiveNotificationHandler();
+ CcApplicationContext appCtx =
+ (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext();
+ ActiveNotificationHandler activeJobNotificationHandler =
+ (ActiveNotificationHandler) appCtx.getActiveNotificationHandler();
JobId jobId = new JobId(1);
// Mock ActiveRuntime
@@ -104,8 +104,8 @@
entityId, datasetList, partitionConstraint, FeedIntakeOperatorNodePushable.class.getSimpleName(),
NoRetryPolicyFactory.INSTANCE, null, Collections.emptyList());
// Register mock runtime
- NCAppRuntimeContext nc1AppCtx = (NCAppRuntimeContext) ExecutionTestUtil.integrationUtil.ncs[0]
- .getApplicationContext();
+ NCAppRuntimeContext nc1AppCtx =
+ (NCAppRuntimeContext) ExecutionTestUtil.integrationUtil.ncs[0].getApplicationContext();
nc1AppCtx.getActiveManager().registerRuntime(mockRuntime);
// Check init stats
@@ -116,8 +116,8 @@
eventsListener.refreshStats(1000);
requestedStats = eventsListener.getStats();
Assert.assertTrue(requestedStats.contains("N/A"));
- WaitForStateSubscriber startingSubscriber = new WaitForStateSubscriber(eventsListener,
- Collections.singleton(ActivityState.STARTING));
+ WaitForStateSubscriber startingSubscriber =
+ new WaitForStateSubscriber(eventsListener, Collections.singleton(ActivityState.STARTING));
// Update stats of created/started job without joined partition
TestUserActor user = new TestUserActor("Xikui", mdProvider, null);
Action start = user.startActivity(eventsListener);
@@ -128,8 +128,8 @@
requestedStats = eventsListener.getStats();
Assert.assertTrue(requestedStats.contains("N/A"));
// Fake partition message and notify eventListener
- ActivePartitionMessage partitionMessage = new ActivePartitionMessage(activeRuntimeId, jobId,
- Event.RUNTIME_REGISTERED, null);
+ ActivePartitionMessage partitionMessage =
+ new ActivePartitionMessage(activeRuntimeId, jobId, Event.RUNTIME_REGISTERED, null);
partitionMessage.handle(appCtx);
start.sync();
if (start.hasFailed()) {
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestSuite.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestSuite.java
index 57a446a..a7c9c40 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestSuite.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/aql/AQLTestSuite.java
@@ -29,10 +29,11 @@
import org.apache.commons.lang3.StringUtils;
public class AQLTestSuite extends TestSuite {
- private static String AQLTS_PATH = StringUtils.join(new String[] { "src", "test", "resources", "parserts",
- "queries" + File.separator }, File.separator);
- private static String AQLTS_SQL_LIKE_PATH = StringUtils.join(new String[] { "src", "test", "resources", "parserts",
- "queries-sql-like" + File.separator }, File.separator);
+ private static String AQLTS_PATH = StringUtils
+ .join(new String[] { "src", "test", "resources", "parserts", "queries" + File.separator }, File.separator);
+ private static String AQLTS_SQL_LIKE_PATH = StringUtils.join(
+ new String[] { "src", "test", "resources", "parserts", "queries-sql-like" + File.separator },
+ File.separator);
public static Test suite() throws ParseException, UnsupportedEncodingException, FileNotFoundException {
File testData = new File(AQLTS_PATH);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/CancellationTestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/CancellationTestExecutor.java
index b4f9ded..41aa23f 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/CancellationTestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/CancellationTestExecutor.java
@@ -112,7 +112,8 @@
queryCount.increment();
return false;
} else {
- System.err.println("Expected to find one of the following in error text:\n+++++\n" + expectedErrors + "\n+++++");
+ System.err.println(
+ "Expected to find one of the following in error text:\n+++++\n" + expectedErrors + "\n+++++");
return true;
}
}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
index 4a3444e..deb2c72 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/common/TestExecutor.java
@@ -123,7 +123,8 @@
private static final Pattern HTTP_PARAM_PATTERN = Pattern.compile("param (\\w+)=(.*)", Pattern.MULTILINE);
private static final Pattern HTTP_BODY_PATTERN = Pattern.compile("body=(.*)", Pattern.MULTILINE);
private static final Pattern HTTP_STATUSCODE_PATTERN = Pattern.compile("statuscode (.*)", Pattern.MULTILINE);
- private static final Pattern MAX_RESULT_READS_PATTERN = Pattern.compile("maxresultreads=(\\d+)(\\D|$)", Pattern.MULTILINE);
+ private static final Pattern MAX_RESULT_READS_PATTERN =
+ Pattern.compile("maxresultreads=(\\d+)(\\D|$)", Pattern.MULTILINE);
public static final int TRUNCATE_THRESHOLD = 16384;
public static final String DELIVERY_ASYNC = "async";
@@ -1003,7 +1004,7 @@
ctx.extension(), cUnit.getOutputDir().getCompare());
break;
case "server": // (start <test server name> <port>
- // [<arg1>][<arg2>][<arg3>]...|stop (<port>|all))
+ // [<arg1>][<arg2>][<arg3>]...|stop (<port>|all))
try {
lines = statement.trim().split("\n");
String[] command = lines[lines.length - 1].trim().split(" ");
@@ -1051,7 +1052,7 @@
}
break;
case "lib": // expected format <dataverse-name> <library-name>
- // <library-directory>
+ // <library-directory>
// TODO: make this case work well with entity names containing spaces by
// looking for \"
lines = statement.split("\n");
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/ComponentRollbackTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/ComponentRollbackTest.java
index fca0848..9828424 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/ComponentRollbackTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/ComponentRollbackTest.java
@@ -90,8 +90,8 @@
private static final IAType[] KEY_TYPES = { BuiltinType.AINT32 };
private static final ARecordType RECORD_TYPE = new ARecordType("TestRecordType", new String[] { "key", "value" },
new IAType[] { BuiltinType.AINT32, BuiltinType.AINT64 }, false);
- private static final GenerationFunction[] RECORD_GEN_FUNCTION = { GenerationFunction.DETERMINISTIC,
- GenerationFunction.DETERMINISTIC };
+ private static final GenerationFunction[] RECORD_GEN_FUNCTION =
+ { GenerationFunction.DETERMINISTIC, GenerationFunction.DETERMINISTIC };
private static final boolean[] UNIQUE_RECORD_FIELDS = { true, false };
private static final ARecordType META_TYPE = null;
private static final GenerationFunction[] META_GEN_FUNCTION = null;
@@ -152,14 +152,14 @@
List<List<String>> partitioningKeys = new ArrayList<>();
partitioningKeys.add(Collections.singletonList("key"));
int partition = 0;
- dataset = new TestDataset(DATAVERSE_NAME, DATASET_NAME, DATAVERSE_NAME, DATA_TYPE_NAME, NODE_GROUP_NAME,
- NoMergePolicyFactory.NAME, null, new InternalDatasetDetails(null, PartitioningStrategy.HASH,
- partitioningKeys, null, null, null, false, null),
+ dataset = new TestDataset(DATAVERSE_NAME, DATASET_NAME, DATAVERSE_NAME, DATA_TYPE_NAME,
+ NODE_GROUP_NAME, NoMergePolicyFactory.NAME, null, new InternalDatasetDetails(null,
+ PartitioningStrategy.HASH, partitioningKeys, null, null, null, false, null),
null, DatasetType.INTERNAL, DATASET_ID, 0);
PrimaryIndexInfo primaryIndexInfo = nc.createPrimaryIndex(dataset, KEY_TYPES, RECORD_TYPE, META_TYPE, null,
storageManager, KEY_INDEXES, KEY_INDICATORS_LIST, partition);
- IndexDataflowHelperFactory iHelperFactory = new IndexDataflowHelperFactory(nc.getStorageManager(),
- primaryIndexInfo.getFileSplitProvider());
+ IndexDataflowHelperFactory iHelperFactory =
+ new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
JobId jobId = nc.newJobId();
ctx = nc.createTestContext(jobId, partition, false);
indexDataflowHelper = iHelperFactory.create(ctx.getJobletContext().getServiceContext(), partition);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/LogMarkerTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/LogMarkerTest.java
index 5e32a10..0a968c8 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/LogMarkerTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/LogMarkerTest.java
@@ -75,8 +75,8 @@
private static final IAType[] KEY_TYPES = { BuiltinType.AINT32 };
private static final ARecordType RECORD_TYPE = new ARecordType("TestRecordType", new String[] { "key", "value" },
new IAType[] { BuiltinType.AINT32, BuiltinType.AINT64 }, false);
- private static final GenerationFunction[] RECORD_GEN_FUNCTION = { GenerationFunction.DETERMINISTIC,
- GenerationFunction.DETERMINISTIC };
+ private static final GenerationFunction[] RECORD_GEN_FUNCTION =
+ { GenerationFunction.DETERMINISTIC, GenerationFunction.DETERMINISTIC };
private static final boolean[] UNIQUE_RECORD_FIELDS = { true, false };
private static final ARecordType META_TYPE = null;
private static final GenerationFunction[] META_GEN_FUNCTION = null;
@@ -150,10 +150,10 @@
}
insertOp.close();
nc.getTransactionManager().commitTransaction(txnCtx.getTxnId());
- IndexDataflowHelperFactory iHelperFactory = new IndexDataflowHelperFactory(nc.getStorageManager(),
- indexInfo.getFileSplitProvider());
- IIndexDataflowHelper dataflowHelper = iHelperFactory.create(ctx.getJobletContext().getServiceContext(),
- 0);
+ IndexDataflowHelperFactory iHelperFactory =
+ new IndexDataflowHelperFactory(nc.getStorageManager(), indexInfo.getFileSplitProvider());
+ IIndexDataflowHelper dataflowHelper =
+ iHelperFactory.create(ctx.getJobletContext().getServiceContext(), 0);
dataflowHelper.open();
LSMBTree btree = (LSMBTree) dataflowHelper.getIndexInstance();
LongPointable longPointable = LongPointable.FACTORY.createPointable();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/MultiPartitionLSMIndexTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/MultiPartitionLSMIndexTest.java
index 703eb85..b39a5c6 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/MultiPartitionLSMIndexTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/MultiPartitionLSMIndexTest.java
@@ -86,8 +86,8 @@
private static final IAType[] KEY_TYPES = { BuiltinType.AINT32 };
private static final ARecordType RECORD_TYPE = new ARecordType("TestRecordType", new String[] { "key", "value" },
new IAType[] { BuiltinType.AINT32, BuiltinType.AINT64 }, false);
- private static final GenerationFunction[] RECORD_GEN_FUNCTION = { GenerationFunction.DETERMINISTIC,
- GenerationFunction.DETERMINISTIC };
+ private static final GenerationFunction[] RECORD_GEN_FUNCTION =
+ { GenerationFunction.DETERMINISTIC, GenerationFunction.DETERMINISTIC };
private static final boolean[] UNIQUE_RECORD_FIELDS = { true, false };
private static final ARecordType META_TYPE = null;
private static final GenerationFunction[] META_GEN_FUNCTION = null;
@@ -104,8 +104,8 @@
private static final String DATA_TYPE_NAME = "DUMMY";
private static final String NODE_GROUP_NAME = "DEFAULT";
private static final IndexType INDEX_TYPE = IndexType.BTREE;
- private static final List<List<String>> INDEX_FIELD_NAMES = Arrays
- .asList(Arrays.asList(RECORD_TYPE.getFieldNames()[1]));
+ private static final List<List<String>> INDEX_FIELD_NAMES =
+ Arrays.asList(Arrays.asList(RECORD_TYPE.getFieldNames()[1]));
private static final List<Integer> INDEX_FIELD_INDICATORS = Arrays.asList(Index.RECORD_INDICATOR);
private static final List<IAType> INDEX_FIELD_TYPES = Arrays.asList(BuiltinType.AINT64);
private static final StorageComponentProvider storageManager = new StorageComponentProvider();
@@ -147,9 +147,9 @@
public void createIndex() throws Exception {
List<List<String>> partitioningKeys = new ArrayList<>();
partitioningKeys.add(Collections.singletonList("key"));
- dataset = new TestDataset(DATAVERSE_NAME, DATASET_NAME, DATAVERSE_NAME, DATA_TYPE_NAME, NODE_GROUP_NAME,
- NoMergePolicyFactory.NAME, null, new InternalDatasetDetails(null, PartitioningStrategy.HASH,
- partitioningKeys, null, null, null, false, null),
+ dataset = new TestDataset(DATAVERSE_NAME, DATASET_NAME, DATAVERSE_NAME, DATA_TYPE_NAME,
+ NODE_GROUP_NAME, NoMergePolicyFactory.NAME, null, new InternalDatasetDetails(null,
+ PartitioningStrategy.HASH, partitioningKeys, null, null, null, false, null),
null, DatasetType.INTERNAL, DATASET_ID, 0);
secondaryIndex = new Index(DATAVERSE_NAME, DATASET_NAME, INDEX_NAME, INDEX_TYPE, INDEX_FIELD_NAMES,
INDEX_FIELD_INDICATORS, INDEX_FIELD_TYPES, false, false, false, 0);
@@ -167,18 +167,18 @@
taskCtxs[i] = nc.createTestContext(jobId, i, false);
PrimaryIndexInfo primaryIndexInfo = nc.createPrimaryIndex(dataset, KEY_TYPES, RECORD_TYPE, META_TYPE, null,
storageManager, KEY_INDEXES, KEY_INDICATORS_LIST, i);
- SecondaryIndexInfo secondaryIndexInfo = nc.createSecondaryIndex(primaryIndexInfo, secondaryIndex,
- storageManager, i);
- IndexDataflowHelperFactory iHelperFactory = new IndexDataflowHelperFactory(nc.getStorageManager(),
- primaryIndexInfo.getFileSplitProvider());
- primaryIndexDataflowHelpers[i] = iHelperFactory.create(taskCtxs[i].getJobletContext().getServiceContext(),
- i);
+ SecondaryIndexInfo secondaryIndexInfo =
+ nc.createSecondaryIndex(primaryIndexInfo, secondaryIndex, storageManager, i);
+ IndexDataflowHelperFactory iHelperFactory =
+ new IndexDataflowHelperFactory(nc.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
+ primaryIndexDataflowHelpers[i] =
+ iHelperFactory.create(taskCtxs[i].getJobletContext().getServiceContext(), i);
primaryIndexDataflowHelpers[i].open();
primaryLsmBtrees[i] = (TestLsmBtree) primaryIndexDataflowHelpers[i].getIndexInstance();
- iHelperFactory = new IndexDataflowHelperFactory(nc.getStorageManager(),
- secondaryIndexInfo.getFileSplitProvider());
- secondaryIndexDataflowHelpers[i] = iHelperFactory.create(taskCtxs[i].getJobletContext().getServiceContext(),
- i);
+ iHelperFactory =
+ new IndexDataflowHelperFactory(nc.getStorageManager(), secondaryIndexInfo.getFileSplitProvider());
+ secondaryIndexDataflowHelpers[i] =
+ iHelperFactory.create(taskCtxs[i].getJobletContext().getServiceContext(), i);
secondaryIndexDataflowHelpers[i].open();
secondaryLsmBtrees[i] = (TestLsmBtree) secondaryIndexDataflowHelpers[i].getIndexInstance();
secondaryIndexDataflowHelpers[i].close();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/SearchCursorComponentSwitchTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/SearchCursorComponentSwitchTest.java
index 652616a..7bc7a88 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/SearchCursorComponentSwitchTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dataflow/SearchCursorComponentSwitchTest.java
@@ -121,11 +121,10 @@
public void createIndex() throws Exception {
List<List<String>> partitioningKeys = new ArrayList<>();
partitioningKeys.add(Collections.singletonList("key"));
- dataset =
- new TestDataset(DATAVERSE_NAME, DATASET_NAME, DATAVERSE_NAME, DATA_TYPE_NAME, NODE_GROUP_NAME,
- NoMergePolicyFactory.NAME, null, new InternalDatasetDetails(null, PartitioningStrategy.HASH,
- partitioningKeys, null, null, null, false, null),
- null, DatasetType.INTERNAL, DATASET_ID, 0);
+ dataset = new TestDataset(DATAVERSE_NAME, DATASET_NAME, DATAVERSE_NAME, DATA_TYPE_NAME,
+ NODE_GROUP_NAME, NoMergePolicyFactory.NAME, null, new InternalDatasetDetails(null,
+ PartitioningStrategy.HASH, partitioningKeys, null, null, null, false, null),
+ null, DatasetType.INTERNAL, DATASET_ID, 0);
PrimaryIndexInfo primaryIndexInfo = nc.createPrimaryIndex(dataset, KEY_TYPES, RECORD_TYPE, META_TYPE, null,
storageManager, KEY_INDEXES, KEY_INDICATORS_LIST, 0);
IndexDataflowHelperFactory iHelperFactory =
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/ddl/SecondaryBTreeOperationsHelperTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/ddl/SecondaryBTreeOperationsHelperTest.java
index dc8e228..0affaf2 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/ddl/SecondaryBTreeOperationsHelperTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/ddl/SecondaryBTreeOperationsHelperTest.java
@@ -86,8 +86,8 @@
// create a secondary primary index
TestDataUtil.createPrimaryIndex(datasetName, primaryIndexName);
- index = metadataProvider
- .getIndex(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName, primaryIndexName);
+ index = metadataProvider.getIndex(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName,
+ primaryIndexName);
Assert.assertNotNull(index);
jobSpecification = IndexUtil.buildSecondaryIndexLoadingJobSpec(dataset, index, metadataProvider);
jobSpecification.getOperatorMap().values().forEach(iOperatorDescriptor -> {
@@ -96,8 +96,8 @@
// create a normal BTree index
TestDataUtil.createSecondaryBTreeIndex(datasetName, secondaryIndexName, SKFieldName);
- index = metadataProvider
- .getIndex(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName, secondaryIndexName);
+ index = metadataProvider.getIndex(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName,
+ secondaryIndexName);
Assert.assertNotNull(index);
jobSpecification = IndexUtil.buildSecondaryIndexLoadingJobSpec(dataset, index, metadataProvider);
final long numOfSortOperators = jobSpecification.getOperatorMap().values().stream()
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java
index facf03d1..3b636c6 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/dml/DmlTest.java
@@ -41,8 +41,8 @@
private static final String[] ASTERIX_DATA_DIRS = new String[] { "nc1data", "nc2data" };
private static final String PATH_ACTUAL = "dmltest" + File.separator;
private static final String SEPARATOR = File.separator;
- private static final String PATH_BASE = "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "dmlts"
- + SEPARATOR;
+ private static final String PATH_BASE =
+ "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "dmlts" + SEPARATOR;
private static final String PATH_SCRIPTS = PATH_BASE + "scripts" + SEPARATOR;
private static final String LOAD_FOR_ENLIST_FILE = PATH_SCRIPTS + "load-cust.aql";
@@ -60,12 +60,12 @@
outdir.mkdirs();
integrationUtil.init(true, AsterixHyracksIntegrationUtil.DEFAULT_CONF_FILE);
- Reader loadReader = new BufferedReader(
- new InputStreamReader(new FileInputStream(LOAD_FOR_ENLIST_FILE), "UTF-8"));
- AsterixJavaClient asterixLoad = new AsterixJavaClient(
- (ICcApplicationContext) integrationUtil.cc.getApplicationContext(),
- integrationUtil.getHyracksClientConnection(), loadReader, ERR, new AqlCompilationProvider(),
- new DefaultStatementExecutorFactory(), new StorageComponentProvider());
+ Reader loadReader =
+ new BufferedReader(new InputStreamReader(new FileInputStream(LOAD_FOR_ENLIST_FILE), "UTF-8"));
+ AsterixJavaClient asterixLoad =
+ new AsterixJavaClient((ICcApplicationContext) integrationUtil.cc.getApplicationContext(),
+ integrationUtil.getHyracksClientConnection(), loadReader, ERR, new AqlCompilationProvider(),
+ new DefaultStatementExecutorFactory(), new StorageComponentProvider());
try {
asterixLoad.compile(true, false, false, false, false, true, false);
} catch (AsterixException e) {
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/jsonplan/JsonLogicalPlanTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/jsonplan/JsonLogicalPlanTest.java
index 7965afb..e7b6271 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/jsonplan/JsonLogicalPlanTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/jsonplan/JsonLogicalPlanTest.java
@@ -1 +1,235 @@
-/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.test.jsonplan;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.asterix.api.common.AsterixHyracksIntegrationUtil;
import org.apache.asterix.api.java.AsterixJavaClient;
import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
import org.apache.asterix.common.config.GlobalConfig;
import org.apache.asterix.common.context.IStorageComponentProvider;
import org.apache.asterix.common.dataflow.ICcApplicationContext;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.compiler.provider.AqlCompilationProvider;
import org.apache.asterix.compiler.provider.ILangCompilationProvider;
import org.apache.asterix.compiler.provider.SqlppCompilationProvider;
import org.apache.asterix.external.util.ExternalDataConstants;
import org.apache.asterix.external.util.IdentitiyResolverFactory;
import org.apache.asterix.file.StorageComponentProvider;
import org.apache.asterix.test.base.AsterixTestHelper;
import org.apache.asterix.test.common.TestHelper;
import org.apache.asterix.test.runtime.HDFSCluster;
import org.apache.asterix.translator.IStatementExecutorFactory;
import org.apache.asterix.translator.SessionConfig.PlanFormat;
import org.apache.hyracks.api.client.IHyracksClientConnection;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.internal.AssumptionViolatedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
@RunWith(Parameterized.class)
public class JsonLogicalPlanTest {
private static final Logger LOGGER = LogManager.getLogger();
protected static final String SEPARATOR = File.separator;
private static final String EXTENSION_AQL = "aql";
private static final String EXTENSION_SQLPP = "sqlpp";
private static final String EXTENSION_RESULT = "plan";
private static final String FILENAME_IGNORE = "ignore.txt";
private static final String FILENAME_ONLY = "only.txt";
private static final String PATH_BASE = "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR
+ "optimizerts" + SEPARATOR;
private static final String PATH_QUERIES = PATH_BASE + "queries" + SEPARATOR;
protected static String PATH_ACTUAL = "target" + File.separator + "jplantest" + SEPARATOR;
protected static boolean optimized = false;
private static final ArrayList<String> ignore = AsterixTestHelper.readTestListFile(FILENAME_IGNORE, PATH_BASE);
private static final ArrayList<String> only = AsterixTestHelper.readTestListFile(FILENAME_ONLY, PATH_BASE);
protected static final String TEST_CONFIG_FILE_NAME = "src/main/resources/cc.conf";
private static final ILangCompilationProvider aqlCompilationProvider = new AqlCompilationProvider();
private static final ILangCompilationProvider sqlppCompilationProvider = new SqlppCompilationProvider();
protected static ILangCompilationProvider extensionLangCompilationProvider = null;
protected static IStatementExecutorFactory statementExecutorFactory = new DefaultStatementExecutorFactory();
protected static IStorageComponentProvider storageComponentProvider = new StorageComponentProvider();
protected static AsterixHyracksIntegrationUtil integrationUtil = new AsterixHyracksIntegrationUtil();
@BeforeClass
public static void setUp() throws Exception {
System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
final File outdir = new File(PATH_ACTUAL);
outdir.mkdirs();
HDFSCluster.getInstance().setup();
integrationUtil.init(true, TEST_CONFIG_FILE_NAME);
// Set the node resolver to be the identity resolver that expects node names
// to be node controller ids; a valid assumption in test environment.
System.setProperty(ExternalDataConstants.NODE_RESOLVER_FACTORY_PROPERTY,
IdentitiyResolverFactory.class.getName());
}
@AfterClass
public static void tearDown() throws Exception {
File outdir = new File(PATH_ACTUAL);
File[] files = outdir.listFiles();
if (files == null || files.length == 0) {
outdir.delete();
}
HDFSCluster.getInstance().cleanup();
integrationUtil.deinit(true);
}
private static void suiteBuildPerFile(File file, Collection<Object[]> testArgs, String path) {
if (file.isDirectory() && !file.getName().startsWith(".")) {
for (File innerfile : file.listFiles()) {
String subdir = innerfile.isDirectory() ? path + innerfile.getName() + SEPARATOR : path;
suiteBuildPerFile(innerfile, testArgs, subdir);
}
}
if (file.isFile() && (file.getName().endsWith(EXTENSION_AQL) || file.getName().endsWith(EXTENSION_SQLPP))) {
String resultFileName = AsterixTestHelper.extToResExt(file.getName(), EXTENSION_RESULT);
File actualFile = new File(PATH_ACTUAL + SEPARATOR + path + resultFileName);
testArgs.add(new Object[] { file, actualFile });
}
}
@Parameters(name = "JsonLogicalPlanTest {index}: {0}")
public static Collection<Object[]> tests() {
Collection<Object[]> testArgs = new ArrayList<>();
if (only.isEmpty()) {
suiteBuildPerFile(new File(PATH_QUERIES), testArgs, "");
} else {
for (String path : only) {
suiteBuildPerFile(new File(PATH_QUERIES + path), testArgs,
path.lastIndexOf(SEPARATOR) < 0 ? "" : path.substring(0, path.lastIndexOf(SEPARATOR) + 1));
}
}
return testArgs;
}
private final File actualFile;
private final File queryFile;
public JsonLogicalPlanTest(final File queryFile, final File actualFile) {
this.queryFile = queryFile;
this.actualFile = actualFile;
}
@Test
public void test() throws Exception {
try {
String queryFileShort = queryFile.getPath().substring(PATH_QUERIES.length()).replace(SEPARATOR.charAt(0),
'/');
if (!only.isEmpty()) {
boolean toRun = TestHelper.isInPrefixList(only, queryFileShort);
if (!toRun) {
LOGGER.info("SKIP TEST: \"" + queryFile.getPath()
+ "\" \"only.txt\" not empty and not in \"only.txt\".");
}
Assume.assumeTrue(toRun);
}
boolean skipped = TestHelper.isInPrefixList(ignore, queryFileShort);
if (skipped) {
LOGGER.info("SKIP TEST: \"" + queryFile.getPath() + "\" in \"ignore.txt\".");
}
Assume.assumeTrue(!skipped);
LOGGER.info("RUN TEST: \"" + queryFile.getPath() + "\"");
Reader query = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
// Forces the creation of actualFile.
actualFile.getParentFile().mkdirs();
PrintWriter plan = new PrintWriter(actualFile);
ILangCompilationProvider provider = queryFile.getName().endsWith("aql") ? aqlCompilationProvider
: sqlppCompilationProvider;
if (extensionLangCompilationProvider != null) {
provider = extensionLangCompilationProvider;
}
IHyracksClientConnection hcc = integrationUtil.getHyracksClientConnection();
AsterixJavaClient asterix = new AsterixJavaClient(
(ICcApplicationContext) integrationUtil.cc.getApplicationContext(), hcc, query, plan, provider,
statementExecutorFactory, storageComponentProvider);
try {
asterix.compile(true, false, !optimized, optimized, false, false, false, PlanFormat.JSON);
} catch (AsterixException e) {
plan.close();
query.close();
throw new Exception("Compile ERROR for " + queryFile + ": " + e.getMessage(), e);
}
plan.close();
query.close();
BufferedReader readerActual = new BufferedReader(
new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
String lineActual, objectActual = "";
boolean firstPlan = false;
while ((lineActual = readerActual.readLine()) != null) {
if (lineActual.contains("--")) {
if (firstPlan) {
break;
}
firstPlan = true;
} else {
objectActual = objectActual + lineActual;
}
}
try {
final JsonParser parser = new ObjectMapper().getJsonFactory().createJsonParser(objectActual);
while (parser.nextToken() != null) {
}
} finally {
readerActual.close();
}
} catch (Exception e) {
if (!(e instanceof AssumptionViolatedException)) {
LOGGER.error("Test \"" + queryFile.getPath() + "\" FAILED!");
throw new Exception("Test \"" + queryFile.getPath() + "\" FAILED!", e);
} else {
throw e;
}
}
}
}
\ No newline at end of file
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.asterix.test.jsonplan;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.io.PrintWriter;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Collection;
+
+import org.apache.asterix.api.common.AsterixHyracksIntegrationUtil;
+import org.apache.asterix.api.java.AsterixJavaClient;
+import org.apache.asterix.app.translator.DefaultStatementExecutorFactory;
+import org.apache.asterix.common.config.GlobalConfig;
+import org.apache.asterix.common.context.IStorageComponentProvider;
+import org.apache.asterix.common.dataflow.ICcApplicationContext;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.compiler.provider.AqlCompilationProvider;
+import org.apache.asterix.compiler.provider.ILangCompilationProvider;
+import org.apache.asterix.compiler.provider.SqlppCompilationProvider;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.IdentitiyResolverFactory;
+import org.apache.asterix.file.StorageComponentProvider;
+import org.apache.asterix.test.base.AsterixTestHelper;
+import org.apache.asterix.test.common.TestHelper;
+import org.apache.asterix.test.runtime.HDFSCluster;
+import org.apache.asterix.translator.IStatementExecutorFactory;
+import org.apache.asterix.translator.SessionConfig.PlanFormat;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.junit.AfterClass;
+import org.junit.Assume;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.internal.AssumptionViolatedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+@RunWith(Parameterized.class)
+public class JsonLogicalPlanTest {
+
+ private static final Logger LOGGER = LogManager.getLogger();
+
+ protected static final String SEPARATOR = File.separator;
+ private static final String EXTENSION_AQL = "aql";
+ private static final String EXTENSION_SQLPP = "sqlpp";
+ private static final String EXTENSION_RESULT = "plan";
+ private static final String FILENAME_IGNORE = "ignore.txt";
+ private static final String FILENAME_ONLY = "only.txt";
+ private static final String PATH_BASE =
+ "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "optimizerts" + SEPARATOR;
+ private static final String PATH_QUERIES = PATH_BASE + "queries" + SEPARATOR;
+ protected static String PATH_ACTUAL = "target" + File.separator + "jplantest" + SEPARATOR;
+ protected static boolean optimized = false;
+
+ private static final ArrayList<String> ignore = AsterixTestHelper.readTestListFile(FILENAME_IGNORE, PATH_BASE);
+ private static final ArrayList<String> only = AsterixTestHelper.readTestListFile(FILENAME_ONLY, PATH_BASE);
+ protected static final String TEST_CONFIG_FILE_NAME = "src/main/resources/cc.conf";
+ private static final ILangCompilationProvider aqlCompilationProvider = new AqlCompilationProvider();
+ private static final ILangCompilationProvider sqlppCompilationProvider = new SqlppCompilationProvider();
+ protected static ILangCompilationProvider extensionLangCompilationProvider = null;
+ protected static IStatementExecutorFactory statementExecutorFactory = new DefaultStatementExecutorFactory();
+ protected static IStorageComponentProvider storageComponentProvider = new StorageComponentProvider();
+
+ protected static AsterixHyracksIntegrationUtil integrationUtil = new AsterixHyracksIntegrationUtil();
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ System.setProperty(GlobalConfig.CONFIG_FILE_PROPERTY, TEST_CONFIG_FILE_NAME);
+ final File outdir = new File(PATH_ACTUAL);
+ outdir.mkdirs();
+
+ HDFSCluster.getInstance().setup();
+
+ integrationUtil.init(true, TEST_CONFIG_FILE_NAME);
+ // Set the node resolver to be the identity resolver that expects node names
+ // to be node controller ids; a valid assumption in test environment.
+ System.setProperty(ExternalDataConstants.NODE_RESOLVER_FACTORY_PROPERTY,
+ IdentitiyResolverFactory.class.getName());
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ File outdir = new File(PATH_ACTUAL);
+ File[] files = outdir.listFiles();
+ if (files == null || files.length == 0) {
+ outdir.delete();
+ }
+
+ HDFSCluster.getInstance().cleanup();
+
+ integrationUtil.deinit(true);
+ }
+
+ private static void suiteBuildPerFile(File file, Collection<Object[]> testArgs, String path) {
+ if (file.isDirectory() && !file.getName().startsWith(".")) {
+ for (File innerfile : file.listFiles()) {
+ String subdir = innerfile.isDirectory() ? path + innerfile.getName() + SEPARATOR : path;
+ suiteBuildPerFile(innerfile, testArgs, subdir);
+ }
+ }
+ if (file.isFile() && (file.getName().endsWith(EXTENSION_AQL) || file.getName().endsWith(EXTENSION_SQLPP))) {
+ String resultFileName = AsterixTestHelper.extToResExt(file.getName(), EXTENSION_RESULT);
+ File actualFile = new File(PATH_ACTUAL + SEPARATOR + path + resultFileName);
+ testArgs.add(new Object[] { file, actualFile });
+ }
+ }
+
+ @Parameters(name = "JsonLogicalPlanTest {index}: {0}")
+ public static Collection<Object[]> tests() {
+ Collection<Object[]> testArgs = new ArrayList<>();
+ if (only.isEmpty()) {
+ suiteBuildPerFile(new File(PATH_QUERIES), testArgs, "");
+ } else {
+ for (String path : only) {
+ suiteBuildPerFile(new File(PATH_QUERIES + path), testArgs,
+ path.lastIndexOf(SEPARATOR) < 0 ? "" : path.substring(0, path.lastIndexOf(SEPARATOR) + 1));
+ }
+ }
+ return testArgs;
+ }
+
+ private final File actualFile;
+ private final File queryFile;
+
+ public JsonLogicalPlanTest(final File queryFile, final File actualFile) {
+ this.queryFile = queryFile;
+ this.actualFile = actualFile;
+ }
+
+ @Test
+ public void test() throws Exception {
+ try {
+ String queryFileShort =
+ queryFile.getPath().substring(PATH_QUERIES.length()).replace(SEPARATOR.charAt(0), '/');
+ if (!only.isEmpty()) {
+ boolean toRun = TestHelper.isInPrefixList(only, queryFileShort);
+ if (!toRun) {
+ LOGGER.info("SKIP TEST: \"" + queryFile.getPath()
+ + "\" \"only.txt\" not empty and not in \"only.txt\".");
+ }
+ Assume.assumeTrue(toRun);
+ }
+ boolean skipped = TestHelper.isInPrefixList(ignore, queryFileShort);
+ if (skipped) {
+ LOGGER.info("SKIP TEST: \"" + queryFile.getPath() + "\" in \"ignore.txt\".");
+ }
+ Assume.assumeTrue(!skipped);
+
+ LOGGER.info("RUN TEST: \"" + queryFile.getPath() + "\"");
+ Reader query = new BufferedReader(new InputStreamReader(new FileInputStream(queryFile), "UTF-8"));
+
+ // Forces the creation of actualFile.
+ actualFile.getParentFile().mkdirs();
+
+ PrintWriter plan = new PrintWriter(actualFile);
+ ILangCompilationProvider provider =
+ queryFile.getName().endsWith("aql") ? aqlCompilationProvider : sqlppCompilationProvider;
+ if (extensionLangCompilationProvider != null) {
+ provider = extensionLangCompilationProvider;
+ }
+ IHyracksClientConnection hcc = integrationUtil.getHyracksClientConnection();
+ AsterixJavaClient asterix =
+ new AsterixJavaClient((ICcApplicationContext) integrationUtil.cc.getApplicationContext(), hcc,
+ query, plan, provider, statementExecutorFactory, storageComponentProvider);
+ try {
+ asterix.compile(true, false, !optimized, optimized, false, false, false, PlanFormat.JSON);
+
+ } catch (AsterixException e) {
+ plan.close();
+ query.close();
+ throw new Exception("Compile ERROR for " + queryFile + ": " + e.getMessage(), e);
+ }
+ plan.close();
+ query.close();
+
+ BufferedReader readerActual =
+ new BufferedReader(new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
+ String lineActual, objectActual = "";
+ boolean firstPlan = false;
+ while ((lineActual = readerActual.readLine()) != null) {
+ if (lineActual.contains("--")) {
+ if (firstPlan) {
+ break;
+ }
+ firstPlan = true;
+
+ } else {
+ objectActual = objectActual + lineActual;
+ }
+ }
+
+ try {
+ final JsonParser parser = new ObjectMapper().getJsonFactory().createJsonParser(objectActual);
+ while (parser.nextToken() != null) {
+ }
+ } finally {
+ readerActual.close();
+ }
+
+ } catch (Exception e) {
+ if (!(e instanceof AssumptionViolatedException)) {
+ LOGGER.error("Test \"" + queryFile.getPath() + "\" FAILED!");
+ throw new Exception("Test \"" + queryFile.getPath() + "\" FAILED!", e);
+ } else {
+ throw e;
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/logging/CheckpointingTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/logging/CheckpointingTest.java
index f635d6f..8a2bc1d 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/logging/CheckpointingTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/logging/CheckpointingTest.java
@@ -74,8 +74,8 @@
private static final IAType[] KEY_TYPES = { BuiltinType.AINT32 };
private static final ARecordType RECORD_TYPE = new ARecordType("TestRecordType", new String[] { "key", "value" },
new IAType[] { BuiltinType.AINT32, BuiltinType.AINT64 }, false);
- private static final GenerationFunction[] RECORD_GEN_FUNCTION = { GenerationFunction.DETERMINISTIC,
- GenerationFunction.DETERMINISTIC };
+ private static final GenerationFunction[] RECORD_GEN_FUNCTION =
+ { GenerationFunction.DETERMINISTIC, GenerationFunction.DETERMINISTIC };
private static final boolean[] UNIQUE_RECORD_FIELDS = { true, false };
private static final ARecordType META_TYPE = null;
private static final GenerationFunction[] META_GEN_FUNCTION = null;
@@ -217,8 +217,8 @@
nc.init();
try {
final ITransactionSubsystem txnSubsystem = nc.getTransactionSubsystem();
- final AbstractCheckpointManager checkpointManager = (AbstractCheckpointManager) txnSubsystem
- .getCheckpointManager();
+ final AbstractCheckpointManager checkpointManager =
+ (AbstractCheckpointManager) txnSubsystem.getCheckpointManager();
// Make a checkpoint with the current minFirstLSN
final long minFirstLSN = txnSubsystem.getRecoveryManager().getMinFirstLSN();
checkpointManager.tryCheckpoint(minFirstLSN);
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTest.java
index 0cd9473..0e8b4a9 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTest.java
@@ -43,8 +43,8 @@
private TestCaseContext tcCtx;
private static final String PATH_ACTUAL = "target" + File.separator + "mdtest" + File.separator;
- private static final String PATH_BASE = StringUtils
- .join(new String[] { "src", "test", "resources", "metadata" + File.separator }, File.separator);
+ private static final String PATH_BASE =
+ StringUtils.join(new String[] { "src", "test", "resources", "metadata" + File.separator }, File.separator);
protected static final String TEST_CONFIG_FILE_NAME = "src/main/resources/cc.conf";
private static final TestExecutor testExecutor = new TestExecutor();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java
index decee99..a10c234 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/metadata/MetadataTxnTest.java
@@ -67,8 +67,8 @@
@Test
public void abortMetadataTxn() throws Exception {
- ICcApplicationContext appCtx = (ICcApplicationContext) integrationUtil.getClusterControllerService()
- .getApplicationContext();
+ ICcApplicationContext appCtx =
+ (ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext();
final MetadataProvider metadataProvider = new MetadataProvider(appCtx, null);
final MetadataTransactionContext mdTxn = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxn);
@@ -95,8 +95,8 @@
@Test
public void rebalanceFailureMetadataTxn() throws Exception {
- ICcApplicationContext appCtx = (ICcApplicationContext) integrationUtil.getClusterControllerService()
- .getApplicationContext();
+ ICcApplicationContext appCtx =
+ (ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext();
String nodeGroup = "ng";
String datasetName = "dataset1";
final TestCaseContext.OutputFormat format = TestCaseContext.OutputFormat.CLEAN_JSON;
@@ -155,8 +155,8 @@
testExecutor.executeSqlppUpdateOrDdl("CREATE DATASET " + datasetName + "(KeyType) PRIMARY KEY id;", format);
// get created dataset
- ICcApplicationContext appCtx = (ICcApplicationContext) integrationUtil.getClusterControllerService()
- .getApplicationContext();
+ ICcApplicationContext appCtx =
+ (ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext();
MetadataProvider metadataProvider = new MetadataProvider(appCtx, null);
final MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
@@ -199,8 +199,8 @@
Assert.assertEquals(0, failCount.get());
// make sure all metadata indexes have no pending operations after all txns committed/aborted
- final IDatasetLifecycleManager datasetLifecycleManager = ((INcApplicationContext) integrationUtil.ncs[0]
- .getApplicationContext()).getDatasetLifecycleManager();
+ final IDatasetLifecycleManager datasetLifecycleManager =
+ ((INcApplicationContext) integrationUtil.ncs[0].getApplicationContext()).getDatasetLifecycleManager();
int maxMetadatasetId = 14;
for (int i = 1; i <= maxMetadatasetId; i++) {
if (datasetLifecycleManager.getIndex(i, i) != null) {
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
index 8b228bd..3fd59a4 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/optimizer/OptimizerTest.java
@@ -67,8 +67,8 @@
private static final String EXTENSION_RESULT = "plan";
private static final String FILENAME_IGNORE = "ignore.txt";
private static final String FILENAME_ONLY = "only.txt";
- private static final String PATH_BASE = "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR
- + "optimizerts" + SEPARATOR;
+ private static final String PATH_BASE =
+ "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "optimizerts" + SEPARATOR;
private static final String PATH_QUERIES = PATH_BASE + "queries" + SEPARATOR;
private static final String PATH_EXPECTED = PATH_BASE + "results" + SEPARATOR;
protected static final String PATH_ACTUAL = "target" + File.separator + "opttest" + SEPARATOR;
@@ -153,10 +153,10 @@
@Test
public void test() throws Exception {
try {
- String queryFileShort = queryFile.getPath().substring(PATH_QUERIES.length()).replace(SEPARATOR.charAt(0),
- '/');
+ String queryFileShort =
+ queryFile.getPath().substring(PATH_QUERIES.length()).replace(SEPARATOR.charAt(0), '/');
if (!only.isEmpty()) {
- boolean toRun = TestHelper.isInPrefixList(only, queryFileShort);
+ boolean toRun = TestHelper.isInPrefixList(only, queryFileShort);
if (!toRun) {
LOGGER.info("SKIP TEST: \"" + queryFile.getPath()
+ "\" \"only.txt\" not empty and not in \"only.txt\".");
@@ -178,15 +178,15 @@
actualFile.getParentFile().mkdirs();
PrintWriter plan = new PrintWriter(actualFile);
- ILangCompilationProvider provider = queryFile.getName().endsWith("aql") ? aqlCompilationProvider
- : sqlppCompilationProvider;
+ ILangCompilationProvider provider =
+ queryFile.getName().endsWith("aql") ? aqlCompilationProvider : sqlppCompilationProvider;
if (extensionLangCompilationProvider != null) {
provider = extensionLangCompilationProvider;
}
IHyracksClientConnection hcc = integrationUtil.getHyracksClientConnection();
- AsterixJavaClient asterix = new AsterixJavaClient(
- (ICcApplicationContext) integrationUtil.cc.getApplicationContext(), hcc, query, plan, provider,
- statementExecutorFactory, storageComponentProvider);
+ AsterixJavaClient asterix =
+ new AsterixJavaClient((ICcApplicationContext) integrationUtil.cc.getApplicationContext(), hcc,
+ query, plan, provider, statementExecutorFactory, storageComponentProvider);
try {
asterix.compile(true, false, false, true, true, false, false);
} catch (AlgebricksException e) {
@@ -197,10 +197,10 @@
plan.close();
query.close();
- BufferedReader readerExpected = new BufferedReader(
- new InputStreamReader(new FileInputStream(expectedFile), "UTF-8"));
- BufferedReader readerActual = new BufferedReader(
- new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
+ BufferedReader readerExpected =
+ new BufferedReader(new InputStreamReader(new FileInputStream(expectedFile), "UTF-8"));
+ BufferedReader readerActual =
+ new BufferedReader(new InputStreamReader(new FileInputStream(actualFile), "UTF-8"));
String lineExpected, lineActual;
int num = 1;
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/LangExecutionUtil.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/LangExecutionUtil.java
index 8deb827..9da0e66 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/LangExecutionUtil.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/LangExecutionUtil.java
@@ -49,8 +49,8 @@
public class LangExecutionUtil {
private static final String PATH_ACTUAL = "target" + File.separator + "rttest" + File.separator;
- private static final String PATH_BASE = StringUtils.join(new String[] { "src", "test", "resources", "runtimets" },
- File.separator);
+ private static final String PATH_BASE =
+ StringUtils.join(new String[] { "src", "test", "resources", "runtimets" }, File.separator);
private static final boolean cleanupOnStart = true;
private static final boolean cleanupOnStop = true;
@@ -214,8 +214,8 @@
String processId = processName.split("@")[0];
// Checks whether there are leaked run files from operators.
- Process process = Runtime.getRuntime()
- .exec(new String[] { "bash", "-c", "lsof -p " + processId + "|grep waf|wc -l" });
+ Process process =
+ Runtime.getRuntime().exec(new String[] { "bash", "-c", "lsof -p " + processId + "|grep waf|wc -l" });
try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
int runFileCount = Integer.parseInt(reader.readLine().trim());
if (runFileCount != 0) {
@@ -231,8 +231,8 @@
}
private static void outputLeakedOpenFiles(String processId) throws IOException {
- Process process = Runtime.getRuntime()
- .exec(new String[] { "bash", "-c", "lsof -p " + processId + "|grep waf" });
+ Process process =
+ Runtime.getRuntime().exec(new String[] { "bash", "-c", "lsof -p " + processId + "|grep waf" });
try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
String line;
while ((line = reader.readLine()) != null) {
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ReplicationExecutionTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ReplicationExecutionTest.java
index d54d448..cbca8dc 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ReplicationExecutionTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/runtime/ReplicationExecutionTest.java
@@ -60,7 +60,8 @@
final String nodeId = nc.getId();
final INcApplicationContext appCtx = (INcApplicationContext) nc.getApplicationContext();
int apiPort = appCtx.getExternalProperties().getNcApiPort();
- int replicationPort = (int) appCtx.getServiceContext().getAppConfig().get(NCConfig.Option.REPLICATION_LISTEN_PORT);
+ int replicationPort =
+ (int) appCtx.getServiceContext().getAppConfig().get(NCConfig.Option.REPLICATION_LISTEN_PORT);
ncEndPoints.put(nodeId, InetSocketAddress.createUnresolved(ip, apiPort));
replicationAddress.put(nodeId, InetSocketAddress.createUnresolved(ip, replicationPort));
}
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/RuntimeParserTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/RuntimeParserTest.java
index 054da38..2088903 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/RuntimeParserTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/sqlpp/RuntimeParserTest.java
@@ -44,8 +44,8 @@
protected static final Logger LOGGER = LogManager.getLogger();
protected static final String PATH_ACTUAL = "target" + File.separator + "runtime_parserts" + File.separator;
- protected static final String PATH_BASE = StringUtils.join(new String[] { "src", "test", "resources", "runtimets" },
- File.separator);
+ protected static final String PATH_BASE =
+ StringUtils.join(new String[] { "src", "test", "resources", "runtimets" }, File.separator);
private final TestExecutor testExecutor = new ParserTestExecutor();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/DiskIsFullTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/DiskIsFullTest.java
index c813c43..8897169 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/DiskIsFullTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/DiskIsFullTest.java
@@ -63,8 +63,8 @@
private static final IAType[] KEY_TYPES = { BuiltinType.AINT32 };
private static final ARecordType RECORD_TYPE = new ARecordType("TestRecordType", new String[] { "key", "value" },
new IAType[] { BuiltinType.AINT32, BuiltinType.AINT64 }, false);
- private static final GenerationFunction[] RECORD_GEN_FUNCTION = { GenerationFunction.DETERMINISTIC,
- GenerationFunction.DETERMINISTIC };
+ private static final GenerationFunction[] RECORD_GEN_FUNCTION =
+ { GenerationFunction.DETERMINISTIC, GenerationFunction.DETERMINISTIC };
private static final boolean[] UNIQUE_RECORD_FIELDS = { true, false };
private static final ARecordType META_TYPE = null;
private static final GenerationFunction[] META_GEN_FUNCTION = null;
@@ -111,8 +111,8 @@
if (!shouldRun) {
return;
}
- HyracksDataException expectedException = HyracksDataException
- .create(ErrorCode.CANNOT_MODIFY_INDEX_DISK_IS_FULL);
+ HyracksDataException expectedException =
+ HyracksDataException.create(ErrorCode.CANNOT_MODIFY_INDEX_DISK_IS_FULL);
try {
TestNodeController nc = new TestNodeController(null, false);
nc.init();
diff --git a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java
index 31cc77a..31a6004 100644
--- a/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java
+++ b/asterixdb/asterix-app/src/test/java/org/apache/asterix/test/storage/IndexDropOperatorNodePushableTest.java
@@ -104,15 +104,15 @@
List<List<String>> partitioningKeys = new ArrayList<>();
partitioningKeys.add(Collections.singletonList("key"));
Dataset dataset = new Dataset(DATAVERSE_NAME, DATASET_NAME, DATAVERSE_NAME, DATA_TYPE_NAME, NODE_GROUP_NAME,
- NoMergePolicyFactory.NAME, null,
- new InternalDatasetDetails(null, InternalDatasetDetails.PartitioningStrategy.HASH, partitioningKeys,
- null, null, null, false, null),
+ NoMergePolicyFactory.NAME,
+ null, new InternalDatasetDetails(null, InternalDatasetDetails.PartitioningStrategy.HASH,
+ partitioningKeys, null, null, null, false, null),
null, DatasetConfig.DatasetType.INTERNAL, DATASET_ID, 0);
// create dataset
TestNodeController.PrimaryIndexInfo indexInfo = nc.createPrimaryIndex(dataset, KEY_TYPES, RECORD_TYPE,
META_TYPE, null, storageManager, KEY_INDEXES, KEY_INDICATORS_LIST, 0);
- IndexDataflowHelperFactory helperFactory = new IndexDataflowHelperFactory(nc.getStorageManager(),
- indexInfo.getFileSplitProvider());
+ IndexDataflowHelperFactory helperFactory =
+ new IndexDataflowHelperFactory(nc.getStorageManager(), indexInfo.getFileSplitProvider());
JobId jobId = nc.newJobId();
IHyracksTaskContext ctx = nc.createTestContext(jobId, 0, true);
IIndexDataflowHelper dataflowHelper = helperFactory.create(ctx.getJobletContext().getServiceContext(), 0);
@@ -153,10 +153,10 @@
MetadataManager.INSTANCE.commitTransaction(mdTxn);
FileSplit[] splits = SplitsAndConstraintsUtil.getIndexSplits(appCtx.getClusterStateManager(), dataset,
indexName, Arrays.asList("asterix_nc1"));
- final ConstantFileSplitProvider constantFileSplitProvider = new ConstantFileSplitProvider(
- Arrays.copyOfRange(splits, 0, 1));
- IndexDataflowHelperFactory helperFactory = new IndexDataflowHelperFactory(nc.getStorageManager(),
- constantFileSplitProvider);
+ final ConstantFileSplitProvider constantFileSplitProvider =
+ new ConstantFileSplitProvider(Arrays.copyOfRange(splits, 0, 1));
+ IndexDataflowHelperFactory helperFactory =
+ new IndexDataflowHelperFactory(nc.getStorageManager(), constantFileSplitProvider);
JobId jobId = nc.newJobId();
IHyracksTaskContext ctx = nc.createTestContext(jobId, 0, true);
IIndexDataflowHelper dataflowHelper = helperFactory.create(ctx.getJobletContext().getServiceContext(), 0);
@@ -172,8 +172,8 @@
// open the index to make it in-use
dataflowHelper.open();
// try to drop in-use index (should fail)
- IndexDropOperatorNodePushable dropInUseOp = new IndexDropOperatorNodePushable(helperFactory,
- EnumSet.noneOf(DropOption.class), ctx, 0);
+ IndexDropOperatorNodePushable dropInUseOp =
+ new IndexDropOperatorNodePushable(helperFactory, EnumSet.noneOf(DropOption.class), ctx, 0);
try {
dropInUseOp.initialize();
} catch (HyracksDataException e) {
@@ -212,8 +212,8 @@
private void dropNonExisting(IHyracksTaskContext ctx, IndexDataflowHelperFactory helperFactory) throws Exception {
dropFailed.set(false);
// Dropping non-existing index
- IndexDropOperatorNodePushable dropNonExistingOp = new IndexDropOperatorNodePushable(helperFactory,
- EnumSet.noneOf(DropOption.class), ctx, 0);
+ IndexDropOperatorNodePushable dropNonExistingOp =
+ new IndexDropOperatorNodePushable(helperFactory, EnumSet.noneOf(DropOption.class), ctx, 0);
try {
dropNonExistingOp.initialize();
} catch (HyracksDataException e) {
@@ -228,8 +228,8 @@
throws Exception {
// Dropping non-existing index with if exists option should be successful
dropFailed.set(false);
- IndexDropOperatorNodePushable dropNonExistingWithIfExistsOp = new IndexDropOperatorNodePushable(helperFactory,
- EnumSet.of(DropOption.IF_EXISTS), ctx, 0);
+ IndexDropOperatorNodePushable dropNonExistingWithIfExistsOp =
+ new IndexDropOperatorNodePushable(helperFactory, EnumSet.of(DropOption.IF_EXISTS), ctx, 0);
try {
dropNonExistingWithIfExistsOp.initialize();
} catch (HyracksDataException e) {
diff --git a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/Args.java b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/Args.java
index 8a721b9..5432df9 100644
--- a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/Args.java
+++ b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/Args.java
@@ -28,22 +28,18 @@
@SuppressWarnings("FieldCanBeLocal")
public class Args {
- @Option(name = "-clusteraddress", metaVar = "<address>",
- usage = "Hostname or IP Address of the cluster")
+ @Option(name = "-clusteraddress", metaVar = "<address>", usage = "Hostname or IP Address of the cluster")
protected String clusterAddress = InetAddress.getLoopbackAddress().getHostAddress();
@Option(name = "-clusterport", metaVar = "<port>", usage = "Port of the cluster to connect to")
protected int clusterPort = 19002;
- @Option(name = "-clusterstatepath", metaVar = "<path>", hidden = true,
- usage = "Path on host:port to check for cluster readiness")
+ @Option(name = "-clusterstatepath", metaVar = "<path>", hidden = true, usage = "Path on host:port to check for cluster readiness")
protected String clusterStatePath = "admin/cluster";
- @Option(name = "-shutdownpath", metaVar = "<path>", hidden = true,
- usage = "Path on host:port to invoke to initiate shutdown")
+ @Option(name = "-shutdownpath", metaVar = "<path>", hidden = true, usage = "Path on host:port to invoke to initiate shutdown")
protected String shutdownPath = "admin/shutdown";
-
@Option(name = "-timeout", metaVar = "<secs>", usage = "Timeout for wait commands in seconds")
protected int timeoutSecs = 0;
diff --git a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/AsterixHelper.java b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/AsterixHelper.java
index 7dfede9..d71b844 100644
--- a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/AsterixHelper.java
+++ b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/AsterixHelper.java
@@ -25,7 +25,7 @@
private AsterixHelper() {
}
- public static void main(String [] args) throws IOException {
+ public static void main(String[] args) throws IOException {
AsterixHelperExecution execution = new AsterixHelperExecution();
System.exit(execution.execute(args));
}
diff --git a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/AsterixHelperExecution.java b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/AsterixHelperExecution.java
index cf7fd4f..326b1ca 100644
--- a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/AsterixHelperExecution.java
+++ b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/AsterixHelperExecution.java
@@ -40,11 +40,10 @@
protected AsterixHelperExecution() {
}
- @SuppressWarnings({
- "squid:S106", // use of System.err
+ @SuppressWarnings({ "squid:S106", // use of System.err
"squid:S1166" // rethrow or log exception
})
- public int execute(String [] argArray) throws IOException {
+ public int execute(String[] argArray) throws IOException {
Args args = createArgs();
CmdLineParser parser = createParser(args);
try {
@@ -59,8 +58,8 @@
return command.execute();
}
} catch (CmdLineException e) {
- System.err.println("ERROR: " + e.getMessage() + "\n\n"
- + "Usage: " + getHelperCommandName() + " [options] <command>");
+ System.err.println(
+ "ERROR: " + e.getMessage() + "\n\n" + "Usage: " + getHelperCommandName() + " [options] <command>");
printUsageDetails(parser, System.err);
return 99;
diff --git a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/commands/RemoteCommand.java b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/commands/RemoteCommand.java
index 6a16761..4ba48c6 100644
--- a/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/commands/RemoteCommand.java
+++ b/asterixdb/asterix-client-helper/src/main/java/org/apache/asterix/clienthelper/commands/RemoteCommand.java
@@ -67,7 +67,7 @@
protected HttpURLConnection openConnection(String path, Method method) throws IOException {
URL url = new URL("http://" + hostPort + "/" + path);
- HttpURLConnection conn = (HttpURLConnection)url.openConnection();
+ HttpURLConnection conn = (HttpURLConnection) url.openConnection();
final int timeoutMillis =
(int) TimeUnit.SECONDS.toMillis(Math.max(MAX_CONNECTION_TIMEOUT_SECS, args.getTimeoutSecs()));
conn.setConnectTimeout(timeoutMillis);
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/annotations/SkipSecondaryIndexSearchExpressionAnnotation.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/annotations/SkipSecondaryIndexSearchExpressionAnnotation.java
index de1e1fa..4d08b54 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/annotations/SkipSecondaryIndexSearchExpressionAnnotation.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/annotations/SkipSecondaryIndexSearchExpressionAnnotation.java
@@ -24,7 +24,8 @@
public class SkipSecondaryIndexSearchExpressionAnnotation extends AbstractExpressionAnnotation {
public static final String HINT_STRING = "skip-index";
- public static final SkipSecondaryIndexSearchExpressionAnnotation INSTANCE = new SkipSecondaryIndexSearchExpressionAnnotation();
+ public static final SkipSecondaryIndexSearchExpressionAnnotation INSTANCE =
+ new SkipSecondaryIndexSearchExpressionAnnotation();
@Override
public IExpressionAnnotation copy() {
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IClusterManagementWork.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IClusterManagementWork.java
index 8bc6eb0..c2d3303 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IClusterManagementWork.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IClusterManagementWork.java
@@ -26,12 +26,12 @@
}
enum ClusterState {
- UNUSABLE, // one or more cluster partitions are inactive or max id resources have not been reported
- PENDING, // the metadata node has not yet joined & initialized
- RECOVERING, // global recovery has not yet completed
- ACTIVE, // cluster is ACTIVE and ready for requests
- REBALANCING, // replication is processing failbacks
- SHUTTING_DOWN // a shutdown request has been received, and is underway
+ UNUSABLE, // one or more cluster partitions are inactive or max id resources have not been reported
+ PENDING, // the metadata node has not yet joined & initialized
+ RECOVERING, // global recovery has not yet completed
+ ACTIVE, // cluster is ACTIVE and ready for requests
+ REBALANCING, // replication is processing failbacks
+ SHUTTING_DOWN // a shutdown request has been received, and is underway
}
WorkType getClusterManagementWorkType();
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IClusterManagementWorkResponse.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IClusterManagementWorkResponse.java
index a39aecd..0506817 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IClusterManagementWorkResponse.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/api/IClusterManagementWorkResponse.java
@@ -18,7 +18,6 @@
*/
package org.apache.asterix.common.api;
-
public interface IClusterManagementWorkResponse {
public enum Status {
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixProperties.java
index 690d326..2c981c9 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/AsterixProperties.java
@@ -34,15 +34,9 @@
}
public static void registerConfigOptions(IConfigManager configManager) {
- configManager.register(
- NodeProperties.Option.class,
- CompilerProperties.Option.class,
- MetadataProperties.Option.class,
- ExternalProperties.Option.class,
- ActiveProperties.Option.class,
- MessagingProperties.Option.class,
- ReplicationProperties.Option.class,
- StorageProperties.Option.class,
+ configManager.register(NodeProperties.Option.class, CompilerProperties.Option.class,
+ MetadataProperties.Option.class, ExternalProperties.Option.class, ActiveProperties.Option.class,
+ MessagingProperties.Option.class, ReplicationProperties.Option.class, StorageProperties.Option.class,
TransactionProperties.Option.class);
// we need to process the old-style asterix config before we apply defaults!
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
index 5ccff4f..0d29943 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/CompilerProperties.java
@@ -33,19 +33,30 @@
public class CompilerProperties extends AbstractProperties {
public enum Option implements IOption {
- COMPILER_SORTMEMORY(LONG_BYTE_UNIT, StorageUtil.getLongSizeInBytes(32L, MEGABYTE),
+ COMPILER_SORTMEMORY(
+ LONG_BYTE_UNIT,
+ StorageUtil.getLongSizeInBytes(32L, MEGABYTE),
"The memory budget (in bytes) for a sort operator instance in a partition"),
- COMPILER_JOINMEMORY(LONG_BYTE_UNIT, StorageUtil.getLongSizeInBytes(32L, MEGABYTE),
+ COMPILER_JOINMEMORY(
+ LONG_BYTE_UNIT,
+ StorageUtil.getLongSizeInBytes(32L, MEGABYTE),
"The memory budget (in bytes) for a join operator instance in a partition"),
- COMPILER_GROUPMEMORY(LONG_BYTE_UNIT, StorageUtil.getLongSizeInBytes(32L, MEGABYTE),
+ COMPILER_GROUPMEMORY(
+ LONG_BYTE_UNIT,
+ StorageUtil.getLongSizeInBytes(32L, MEGABYTE),
"The memory budget (in bytes) for a group by operator instance in a partition"),
- COMPILER_FRAMESIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(32, KILOBYTE),
+ COMPILER_FRAMESIZE(
+ INTEGER_BYTE_UNIT,
+ StorageUtil.getIntSizeInBytes(32, KILOBYTE),
"The page size (in bytes) for computation"),
- COMPILER_PARALLELISM(INTEGER, COMPILER_PARALLELISM_AS_STORAGE, "The degree of parallelism for query " +
- "execution. Zero means to use the storage parallelism as the query execution parallelism, while " +
- "other integer values dictate the number of query execution parallel partitions. The system will " +
- "fall back to use the number of all available CPU cores in the cluster as the degree of parallelism " +
- "if the number set by a user is too large or too small"),
+ COMPILER_PARALLELISM(
+ INTEGER,
+ COMPILER_PARALLELISM_AS_STORAGE,
+ "The degree of parallelism for query "
+ + "execution. Zero means to use the storage parallelism as the query execution parallelism, while "
+ + "other integer values dictate the number of query execution parallel partitions. The system will "
+ + "fall back to use the number of all available CPU cores in the cluster as the degree of parallelism "
+ + "if the number set by a user is too large or too small"),
COMPILER_PREGELIX_HOME(STRING, "~/pregelix", "Pregelix installation root directory");
private final IOptionType type;
@@ -83,6 +94,7 @@
return this == COMPILER_PREGELIX_HOME;
}
}
+
public static final String COMPILER_SORTMEMORY_KEY = Option.COMPILER_SORTMEMORY.ini();
public static final String COMPILER_GROUPMEMORY_KEY = Option.COMPILER_GROUPMEMORY.ini();
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java
index 4172a72..4bc1953 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ExternalProperties.java
@@ -36,8 +36,11 @@
ACTIVE_PORT(INTEGER, 19003, "The listen port of the active server"),
NC_API_PORT(INTEGER, 19004, "The listen port of the node controller API server"),
LOG_LEVEL(LEVEL, Level.WARN, "The logging level for master and slave processes"),
- MAX_WAIT_ACTIVE_CLUSTER(INTEGER, 60, "The max pending time (in seconds) for cluster startup. After the " +
- "threshold, if the cluster still is not up and running, it is considered unavailable"),
+ MAX_WAIT_ACTIVE_CLUSTER(
+ INTEGER,
+ 60,
+ "The max pending time (in seconds) for cluster startup. After the "
+ + "threshold, if the cluster still is not up and running, it is considered unavailable"),
CC_JAVA_OPTS(STRING, "-Xmx1024m", "The JVM options passed to the cluster controller process by managix"),
NC_JAVA_OPTS(STRING, "-Xmx1024m", "The JVM options passed to the node controller process(es) by managix");
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MessagingProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MessagingProperties.java
index 6a96546..e382293 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MessagingProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/MessagingProperties.java
@@ -30,7 +30,9 @@
public class MessagingProperties extends AbstractProperties {
public enum Option implements IOption {
- MESSAGING_FRAME_SIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(4, KILOBYTE),
+ MESSAGING_FRAME_SIZE(
+ INTEGER_BYTE_UNIT,
+ StorageUtil.getIntSizeInBytes(4, KILOBYTE),
"The frame size to be used for NC to NC messaging"),
MESSAGING_FRAME_COUNT(INTEGER, 512, "Number of reusable frames for NC to NC messaging");
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ReplicationProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ReplicationProperties.java
index 3455774..18503b2 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ReplicationProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/ReplicationProperties.java
@@ -36,12 +36,18 @@
public class ReplicationProperties extends AbstractProperties {
public enum Option implements IOption {
- REPLICATION_LOG_BUFFER_PAGESIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(128, StorageUnit.KILOBYTE),
+ REPLICATION_LOG_BUFFER_PAGESIZE(
+ INTEGER_BYTE_UNIT,
+ StorageUtil.getIntSizeInBytes(128, StorageUnit.KILOBYTE),
"The size in bytes of each log buffer page"),
REPLICATION_LOG_BUFFER_NUMPAGES(INTEGER, 8, "The number of log buffer pages"),
- REPLICATION_LOG_BATCHSIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(4, StorageUnit.KILOBYTE),
+ REPLICATION_LOG_BATCHSIZE(
+ INTEGER_BYTE_UNIT,
+ StorageUtil.getIntSizeInBytes(4, StorageUnit.KILOBYTE),
"The size in bytes to replicate in each batch"),
- REPLICATION_TIMEOUT(LONG, TimeUnit.SECONDS.toSeconds(30),
+ REPLICATION_TIMEOUT(
+ LONG,
+ TimeUnit.SECONDS.toSeconds(30),
"The time in seconds to timeout waiting for master or replica to ack"),
REPLICATION_ENABLED(BOOLEAN, false, "Whether or not data replication is enabled"),
REPLICATION_FACTOR(INTEGER, 2, "Number of replicas (backups) to maintain per master replica"),
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/StorageProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/StorageProperties.java
index d363f3d..963f0ca 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/StorageProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/StorageProperties.java
@@ -43,16 +43,14 @@
STORAGE_MEMORYCOMPONENT_GLOBALBUDGET(LONG_BYTE_UNIT, Runtime.getRuntime().maxMemory() / 4),
STORAGE_MEMORYCOMPONENT_PAGESIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(128, KILOBYTE)),
STORAGE_MEMORYCOMPONENT_NUMPAGES(INTEGER, (Function<IApplicationConfig, Integer>) accessor ->
- // By default, uses 1/16 of the STORAGE_MEMORYCOMPONENT_GLOBALBUDGET for the write buffer
- // budget for a dataset, including data and indexes.
- (int) (accessor.getLong(STORAGE_MEMORYCOMPONENT_GLOBALBUDGET) /
- (16 * accessor.getInt(STORAGE_MEMORYCOMPONENT_PAGESIZE)))),
+ // By default, uses 1/16 of the STORAGE_MEMORYCOMPONENT_GLOBALBUDGET for the write buffer
+ // budget for a dataset, including data and indexes.
+ (int) (accessor.getLong(STORAGE_MEMORYCOMPONENT_GLOBALBUDGET) / (16 * accessor.getInt(STORAGE_MEMORYCOMPONENT_PAGESIZE)))),
STORAGE_MEMORYCOMPONENT_NUMCOMPONENTS(INTEGER, 2),
STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES(INTEGER, (Function<IApplicationConfig, Integer>) accessor ->
- // By default, uses the min of 1/64 of the STORAGE_MEMORYCOMPONENT_GLOBALBUDGET and 256 pages
- // for the write buffer budget for a metadata dataset, including data and indexes.
- Math.min((int) (accessor.getLong(STORAGE_MEMORYCOMPONENT_GLOBALBUDGET)
- / (64 * accessor.getInt(STORAGE_MEMORYCOMPONENT_PAGESIZE))), 256)),
+ // By default, uses the min of 1/64 of the STORAGE_MEMORYCOMPONENT_GLOBALBUDGET and 256 pages
+ // for the write buffer budget for a metadata dataset, including data and indexes.
+ Math.min((int) (accessor.getLong(STORAGE_MEMORYCOMPONENT_GLOBALBUDGET) / (64 * accessor.getInt(STORAGE_MEMORYCOMPONENT_PAGESIZE))), 256)),
STORAGE_LSM_BLOOMFILTER_FALSEPOSITIVERATE(DOUBLE, 0.01d);
private final IOptionType interpreter;
@@ -79,20 +77,20 @@
case STORAGE_BUFFERCACHE_PAGESIZE:
return "The page size in bytes for pages in the buffer cache";
case STORAGE_BUFFERCACHE_SIZE:
- return "The size of memory allocated to the disk buffer cache. The value should be a multiple" +
- " of the buffer cache page size.";
+ return "The size of memory allocated to the disk buffer cache. The value should be a multiple"
+ + " of the buffer cache page size.";
case STORAGE_BUFFERCACHE_MAXOPENFILES:
return "The maximum number of open files in the buffer cache";
case STORAGE_MEMORYCOMPONENT_GLOBALBUDGET:
- return "The size of memory allocated to the memory components. The value should be a multiple " +
- "of the memory component page size";
+ return "The size of memory allocated to the memory components. The value should be a multiple "
+ + "of the memory component page size";
case STORAGE_MEMORYCOMPONENT_PAGESIZE:
return "The page size in bytes for pages allocated to memory components";
case STORAGE_MEMORYCOMPONENT_NUMPAGES:
- return "The number of pages to allocate for a memory component. This budget is shared by all " +
- "the memory components of the primary index and all its secondary indexes across all I/O " +
- "devices on a node. Note: in-memory components usually has fill factor of 75% since " +
- "the pages are 75% full and the remaining 25% is un-utilized";
+ return "The number of pages to allocate for a memory component. This budget is shared by all "
+ + "the memory components of the primary index and all its secondary indexes across all I/O "
+ + "devices on a node. Note: in-memory components usually have a fill factor of 75% since "
+ + "the pages are 75% full and the remaining 25% is un-utilized";
case STORAGE_MEMORYCOMPONENT_NUMCOMPONENTS:
return "The number of memory components to be used per lsm index";
case STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES:
@@ -114,16 +112,15 @@
return defaultValue;
}
-
@Override
public String usageDefaultOverride(IApplicationConfig accessor, Function<IOption, String> optionPrinter) {
switch (this) {
case STORAGE_MEMORYCOMPONENT_NUMPAGES:
- return "1/16th of the " + optionPrinter.apply(Option.STORAGE_MEMORYCOMPONENT_GLOBALBUDGET) +
- " value";
+ return "1/16th of the " + optionPrinter.apply(Option.STORAGE_MEMORYCOMPONENT_GLOBALBUDGET)
+ + " value";
case STORAGE_METADATA_MEMORYCOMPONENT_NUMPAGES:
- return "1/64th of the " + optionPrinter.apply(Option.STORAGE_MEMORYCOMPONENT_GLOBALBUDGET) +
- " value or 256, whichever is larger";
+ return "1/64th of the " + optionPrinter.apply(Option.STORAGE_MEMORYCOMPONENT_GLOBALBUDGET)
+ + " value or 256, whichever is smaller";
default:
return null;
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/TransactionProperties.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/TransactionProperties.java
index b809b4d..c15f8a7 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/TransactionProperties.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/config/TransactionProperties.java
@@ -36,26 +36,39 @@
public enum Option implements IOption {
TXN_LOG_BUFFER_NUMPAGES(INTEGER, 8, "The number of pages in the transaction log tail"),
- TXN_LOG_BUFFER_PAGESIZE(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(128, KILOBYTE),
+ TXN_LOG_BUFFER_PAGESIZE(
+ INTEGER_BYTE_UNIT,
+ StorageUtil.getIntSizeInBytes(128, KILOBYTE),
"The page size (in bytes) for transaction log buffer"),
- TXN_LOG_PARTITIONSIZE(LONG_BYTE_UNIT, StorageUtil.getLongSizeInBytes(256L, MEGABYTE),
+ TXN_LOG_PARTITIONSIZE(
+ LONG_BYTE_UNIT,
+ StorageUtil.getLongSizeInBytes(256L, MEGABYTE),
"The maximum size (in bytes) of each transaction log file"),
- TXN_LOG_CHECKPOINT_LSNTHRESHOLD(INTEGER_BYTE_UNIT, StorageUtil.getIntSizeInBytes(64, MEGABYTE),
- "The checkpoint threshold (in terms of LSNs (log sequence numbers) that have been written to the " +
- "transaction log, i.e., the length of the transaction log) for transaction logs"),
- TXN_LOG_CHECKPOINT_POLLFREQUENCY(INTEGER, 120,
- "The frequency (in seconds) the checkpoint thread should check to see if a checkpoint should be written"
- ),
+ TXN_LOG_CHECKPOINT_LSNTHRESHOLD(
+ INTEGER_BYTE_UNIT,
+ StorageUtil.getIntSizeInBytes(64, MEGABYTE),
+ "The checkpoint threshold (in terms of LSNs (log sequence numbers) that have been written to the "
+ + "transaction log, i.e., the length of the transaction log) for transaction logs"),
+ TXN_LOG_CHECKPOINT_POLLFREQUENCY(
+ INTEGER,
+ 120,
+ "The frequency (in seconds) the checkpoint thread should check to see if a checkpoint should be written"),
TXN_LOG_CHECKPOINT_HISTORY(INTEGER, 0, "The number of checkpoints to keep in the transaction log"),
- TXN_LOCK_ESCALATIONTHRESHOLD(INTEGER, 1000,
+ TXN_LOCK_ESCALATIONTHRESHOLD(
+ INTEGER,
+ 1000,
"The maximum number of entity locks to obtain before upgrading to a dataset lock"),
- TXN_LOCK_SHRINKTIMER(INTEGER, 5000,
+ TXN_LOCK_SHRINKTIMER(
+ INTEGER,
+ 5000,
"The time (in milliseconds) where under utilization of resources will trigger a shrink phase"),
TXN_LOCK_TIMEOUT_WAITTHRESHOLD(INTEGER, 60000, "Time out (in milliseconds) of waiting for a lock"),
TXN_LOCK_TIMEOUT_SWEEPTHRESHOLD(INTEGER, 10000, "Interval (in milliseconds) for checking lock timeout"),
TXN_COMMITPROFILER_ENABLED(BOOLEAN, false, "Enable output of commit profiler logs"),
TXN_COMMITPROFILER_REPORTINTERVAL(INTEGER, 5, "Interval (in seconds) to report commit profiler logs"),
- TXN_JOB_RECOVERY_MEMORYSIZE(LONG_BYTE_UNIT, StorageUtil.getLongSizeInBytes(64L, MEGABYTE),
+ TXN_JOB_RECOVERY_MEMORYSIZE(
+ LONG_BYTE_UNIT,
+ StorageUtil.getLongSizeInBytes(64L, MEGABYTE),
"The memory budget (in bytes) used for recovery");
private final IOptionType type;
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetMemoryManager.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetMemoryManager.java
index e839d8c..34d0774 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetMemoryManager.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/context/DatasetMemoryManager.java
@@ -109,9 +109,9 @@
@Override
public int getNumPages(int datasetId) {
- return MetadataIndexImmutableProperties.isMetadataDataset(datasetId) ?
- storageProperties.getMetadataMemoryComponentNumPages() :
- storageProperties.getMemoryComponentNumPages();
+ return MetadataIndexImmutableProperties.isMetadataDataset(datasetId)
+ ? storageProperties.getMetadataMemoryComponentNumPages()
+ : storageProperties.getMemoryComponentNumPages();
}
private long getTotalSize(int datasetId) {
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/functions/FunctionSignature.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/functions/FunctionSignature.java
index d4b4215..d7b054d 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/functions/FunctionSignature.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/functions/FunctionSignature.java
@@ -44,9 +44,10 @@
return false;
} else {
FunctionSignature f = ((FunctionSignature) o);
- return ((namespace != null && namespace.equals(f.getNamespace()) || (namespace == null && f.getNamespace() == null)))
- && name.equals(f.getName())
- && (arity == f.getArity() || arity == FunctionIdentifier.VARARGS || f.getArity() == FunctionIdentifier.VARARGS);
+ return ((namespace != null && namespace.equals(f.getNamespace())
+ || (namespace == null && f.getNamespace() == null))) && name.equals(f.getName())
+ && (arity == f.getArity() || arity == FunctionIdentifier.VARARGS
+ || f.getArity() == FunctionIdentifier.VARARGS);
}
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/ioopcallbacks/AbstractLSMIOOperationCallback.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/ioopcallbacks/AbstractLSMIOOperationCallback.java
index c625988..bacebf1 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/ioopcallbacks/AbstractLSMIOOperationCallback.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/ioopcallbacks/AbstractLSMIOOperationCallback.java
@@ -125,9 +125,9 @@
if (oldComponents == null) {
throw new IllegalStateException("Merge must have old components");
}
- LongPointable markerLsn = LongPointable.FACTORY.createPointable(ComponentUtils
- .getLong(oldComponents.get(0).getMetadata(), ComponentUtils.MARKER_LSN_KEY,
- ComponentUtils.NOT_FOUND));
+ LongPointable markerLsn =
+ LongPointable.FACTORY.createPointable(ComponentUtils.getLong(oldComponents.get(0).getMetadata(),
+ ComponentUtils.MARKER_LSN_KEY, ComponentUtils.NOT_FOUND));
newComponent.getMetadata().put(ComponentUtils.MARKER_LSN_KEY, markerLsn);
} else if (opType == LSMIOOperationType.FLUSH) {
// advance memory component indexes
@@ -182,8 +182,8 @@
if (mergedComponents == null || mergedComponents.isEmpty()) {
return null;
}
- return LSMComponentIdUtils
- .union(mergedComponents.get(0).getId(), mergedComponents.get(mergedComponents.size() - 1).getId());
+ return LSMComponentIdUtils.union(mergedComponents.get(0).getId(),
+ mergedComponents.get(mergedComponents.size() - 1).getId());
}
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/memory/ConcurrentFramePool.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/memory/ConcurrentFramePool.java
index d522aa5..b7d34f2 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/memory/ConcurrentFramePool.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/memory/ConcurrentFramePool.java
@@ -33,10 +33,10 @@
public class ConcurrentFramePool {
private static final boolean DEBUG = false;
- private static final String ERROR_INVALID_FRAME_SIZE = "The size should be an integral "
- + "multiple of the default frame size";
- private static final String ERROR_LARGER_THAN_BUDGET_REQUEST = "The requested frame size"
- + " must not be greater than the allocated budget";
+ private static final String ERROR_INVALID_FRAME_SIZE =
+ "The size should be an integral " + "multiple of the default frame size";
+ private static final String ERROR_LARGER_THAN_BUDGET_REQUEST =
+ "The requested frame size" + " must not be greater than the allocated budget";
private static final Logger LOGGER = LogManager.getLogger();
private final String nodeId;
private final int budget;
@@ -234,8 +234,8 @@
try {
frameAction.call(freeBuffer);
} catch (Exception e) {
- LOGGER.log(Level.ERROR,
- "Error while attempting to answer a subscription. Buffer will be reclaimed", e);
+ LOGGER.log(Level.ERROR, "Error while attempting to answer a subscription. Buffer will be reclaimed",
+ e);
// TODO(amoudi): Add test cases and get rid of recursion
if (handedOut == handedOutBeforeCall) {
release(freeBuffer);
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INCMessageBroker.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INCMessageBroker.java
index 6ec2d7e..6bd58a9 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INCMessageBroker.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/messaging/api/INCMessageBroker.java
@@ -45,8 +45,7 @@
* @param message
* @throws Exception
*/
- public void sendMessageToNC(String nodeId, INcAddressedMessage message)
- throws Exception;
+ public void sendMessageToNC(String nodeId, INcAddressedMessage message) throws Exception;
/**
* Queue a message to this {@link INCMessageBroker} for processing
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/PrintUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/PrintUtil.java
index 8c0e4ff..d02ef7c 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/PrintUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/PrintUtil.java
@@ -38,7 +38,7 @@
sb.append(entry.getKey());
sb.append('=');
sb.append(Arrays.toString(entry.getValue()));
- if (! iter.hasNext()) {
+ if (!iter.hasNext()) {
break;
}
sb.append(',').append(' ');
diff --git a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
index 7b9a9a2..6b13468 100644
--- a/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
+++ b/asterixdb/asterix-common/src/main/java/org/apache/asterix/common/utils/StoragePathUtil.java
@@ -77,9 +77,8 @@
}
public static int getPartitionNumFromRelativePath(String relativePath) {
- int startIdx =
- relativePath.indexOf(StorageConstants.PARTITION_DIR_PREFIX) + StorageConstants.PARTITION_DIR_PREFIX
- .length();
+ int startIdx = relativePath.indexOf(StorageConstants.PARTITION_DIR_PREFIX)
+ + StorageConstants.PARTITION_DIR_PREFIX.length();
String partition = relativePath.substring(startIdx, relativePath.indexOf(File.separatorChar, startIdx));
return Integer.parseInt(partition);
}
diff --git a/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/base/RetainLogsRule.java b/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/base/RetainLogsRule.java
index 2077ad5..1fe7dc2 100644
--- a/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/base/RetainLogsRule.java
+++ b/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/base/RetainLogsRule.java
@@ -49,12 +49,12 @@
@Override
protected void failed(Throwable e, Description description) {
- File reportDir = new File(destDir, description.getTestClass().getSimpleName() + "." + description.getMethodName());
+ File reportDir =
+ new File(destDir, description.getTestClass().getSimpleName() + "." + description.getMethodName());
reportDir.mkdirs();
try {
AsterixTestHelper.deepSelectiveCopy(baseDir, reportDir,
- pathname -> pathname.getName().endsWith("log") &&
- pathname.lastModified() > startTime);
+ pathname -> pathname.getName().endsWith("log") && pathname.lastModified() > startTime);
} catch (Exception e1) {
e1.printStackTrace();
}
diff --git a/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/memory/ConcurrentFramePoolUnitTest.java b/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/memory/ConcurrentFramePoolUnitTest.java
index 9be5837..0243a63 100644
--- a/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/memory/ConcurrentFramePoolUnitTest.java
+++ b/asterixdb/asterix-common/src/test/java/org/apache/asterix/test/memory/ConcurrentFramePoolUnitTest.java
@@ -60,8 +60,8 @@
public void testMemoryManager() {
ActiveProperties afp = Mockito.mock(ActiveProperties.class);
Mockito.when(afp.getMemoryComponentGlobalBudget()).thenReturn(FEED_MEM_BUDGET);
- ConcurrentFramePool fmm = new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(),
- DEFAULT_FRAME_SIZE);
+ ConcurrentFramePool fmm =
+ new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(), DEFAULT_FRAME_SIZE);
int i = 0;
while (fmm.get() != null) {
i++;
@@ -75,8 +75,8 @@
try {
ActiveProperties afp = Mockito.mock(ActiveProperties.class);
Mockito.when(afp.getMemoryComponentGlobalBudget()).thenReturn(FEED_MEM_BUDGET);
- ConcurrentFramePool fmm = new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(),
- DEFAULT_FRAME_SIZE);
+ ConcurrentFramePool fmm =
+ new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(), DEFAULT_FRAME_SIZE);
FixedSizeAllocator[] runners = new FixedSizeAllocator[NUM_THREADS];
Thread[] threads = new Thread[NUM_THREADS];
Arrays.parallelSetAll(runners, (int i) -> new FixedSizeAllocator(fmm));
@@ -106,8 +106,8 @@
try {
ActiveProperties afp = Mockito.mock(ActiveProperties.class);
Mockito.when(afp.getMemoryComponentGlobalBudget()).thenReturn(FEED_MEM_BUDGET);
- ConcurrentFramePool fmm = new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(),
- DEFAULT_FRAME_SIZE);
+ ConcurrentFramePool fmm =
+ new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(), DEFAULT_FRAME_SIZE);
Random random = new Random();
int i = 0;
int req;
@@ -141,8 +141,8 @@
try {
ActiveProperties afp = Mockito.mock(ActiveProperties.class);
Mockito.when(afp.getMemoryComponentGlobalBudget()).thenReturn(FEED_MEM_BUDGET);
- ConcurrentFramePool fmm = new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(),
- DEFAULT_FRAME_SIZE);
+ ConcurrentFramePool fmm =
+ new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(), DEFAULT_FRAME_SIZE);
VarSizeAllocator[] runners = new VarSizeAllocator[NUM_THREADS];
Thread[] threads = new Thread[NUM_THREADS];
@@ -180,8 +180,8 @@
public void testAcquireReleaseMemoryManager() throws HyracksDataException {
ActiveProperties afp = Mockito.mock(ActiveProperties.class);
Mockito.when(afp.getMemoryComponentGlobalBudget()).thenReturn(FEED_MEM_BUDGET);
- ConcurrentFramePool fmm = new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(),
- DEFAULT_FRAME_SIZE);
+ ConcurrentFramePool fmm =
+ new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(), DEFAULT_FRAME_SIZE);
Random random = new Random();
ArrayDeque<ByteBuffer> stack = new ArrayDeque<>();
while (true) {
@@ -213,8 +213,8 @@
try {
ActiveProperties afp = Mockito.mock(ActiveProperties.class);
Mockito.when(afp.getMemoryComponentGlobalBudget()).thenReturn(FEED_MEM_BUDGET);
- ConcurrentFramePool fmm = new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(),
- DEFAULT_FRAME_SIZE);
+ ConcurrentFramePool fmm =
+ new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(), DEFAULT_FRAME_SIZE);
FixedSizeGoodAllocator[] runners = new FixedSizeGoodAllocator[NUM_THREADS];
Thread[] threads = new Thread[NUM_THREADS];
Arrays.parallelSetAll(runners, (int i) -> new FixedSizeGoodAllocator(fmm));
@@ -244,8 +244,8 @@
try {
ActiveProperties afp = Mockito.mock(ActiveProperties.class);
Mockito.when(afp.getMemoryComponentGlobalBudget()).thenReturn(FEED_MEM_BUDGET);
- ConcurrentFramePool fmm = new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(),
- DEFAULT_FRAME_SIZE);
+ ConcurrentFramePool fmm =
+ new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(), DEFAULT_FRAME_SIZE);
Random random = new Random();
ArrayDeque<ByteBuffer> stack = new ArrayDeque<>();
int i = 0;
@@ -297,8 +297,8 @@
try {
ActiveProperties afp = Mockito.mock(ActiveProperties.class);
Mockito.when(afp.getMemoryComponentGlobalBudget()).thenReturn(FEED_MEM_BUDGET);
- ConcurrentFramePool fmm = new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(),
- DEFAULT_FRAME_SIZE);
+ ConcurrentFramePool fmm =
+ new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(), DEFAULT_FRAME_SIZE);
VarSizeGoodAllocator[] runners = new VarSizeGoodAllocator[NUM_THREADS];
Thread[] threads = new Thread[NUM_THREADS];
Arrays.parallelSetAll(runners, (int i) -> new VarSizeGoodAllocator(fmm));
@@ -333,8 +333,8 @@
try {
ActiveProperties afp = Mockito.mock(ActiveProperties.class);
Mockito.when(afp.getMemoryComponentGlobalBudget()).thenReturn(FEED_MEM_BUDGET);
- ConcurrentFramePool fmm = new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(),
- DEFAULT_FRAME_SIZE);
+ ConcurrentFramePool fmm =
+ new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(), DEFAULT_FRAME_SIZE);
int i = 0;
ByteBuffer buffer = ByteBuffer.allocate(DEFAULT_FRAME_SIZE);
LinkedBlockingDeque<ByteBuffer> buffers = new LinkedBlockingDeque<>();
@@ -399,8 +399,8 @@
try {
ActiveProperties afp = Mockito.mock(ActiveProperties.class);
Mockito.when(afp.getMemoryComponentGlobalBudget()).thenReturn(FEED_MEM_BUDGET);
- ConcurrentFramePool fmm = new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(),
- DEFAULT_FRAME_SIZE);
+ ConcurrentFramePool fmm =
+ new ConcurrentFramePool("TestNode", afp.getMemoryComponentGlobalBudget(), DEFAULT_FRAME_SIZE);
int i = 0;
ByteBuffer buffer = ByteBuffer.allocate(DEFAULT_FRAME_SIZE);
LinkedBlockingDeque<ByteBuffer> buffers = new LinkedBlockingDeque<>();
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedDataFlowController.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedDataFlowController.java
index 57e79c3..2db17e2 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedDataFlowController.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedDataFlowController.java
@@ -33,7 +33,7 @@
public ChangeFeedDataFlowController(final IHyracksTaskContext ctx, final FeedTupleForwarder tupleForwarder,
final FeedLogManager feedLogManager, final int numOfOutputFields,
final IRecordWithPKDataParser<T> dataParser, final IRecordReader<T> recordReader)
- throws HyracksDataException {
+ throws HyracksDataException {
super(ctx, tupleForwarder, feedLogManager, numOfOutputFields, dataParser, recordReader);
this.dataParser = dataParser;
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedWithMetaDataFlowController.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedWithMetaDataFlowController.java
index 22fa8be..4447b28 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedWithMetaDataFlowController.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/ChangeFeedWithMetaDataFlowController.java
@@ -31,7 +31,7 @@
public ChangeFeedWithMetaDataFlowController(final IHyracksTaskContext ctx, final FeedTupleForwarder tupleForwarder,
final FeedLogManager feedLogManager, final int numOfOutputFields,
final IRecordWithMetadataParser<T> dataParser, final IRecordReader<T> recordReader)
- throws HyracksDataException {
+ throws HyracksDataException {
super(ctx, tupleForwarder, feedLogManager, numOfOutputFields, dataParser, recordReader);
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
index 8026efe..3b9391e 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
@@ -282,7 +282,7 @@
@Override
public String getStats() {
- return "{\"" + INCOMING_RECORDS_COUNT_FIELD_NAME + "\": " + incomingRecordsCount + ", \"" +
- FAILED_AT_PARSER_RECORDS_COUNT_FIELD_NAME + "\": " + failedRecordsCount + "}";
+ return "{\"" + INCOMING_RECORDS_COUNT_FIELD_NAME + "\": " + incomingRecordsCount + ", \""
+ + FAILED_AT_PARSER_RECORDS_COUNT_FIELD_NAME + "\": " + failedRecordsCount + "}";
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
index 82251b6..0f9572d 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
@@ -34,8 +34,7 @@
protected final int numOfTupleFields;
public RecordDataFlowController(IHyracksTaskContext ctx, ITupleForwarder tupleForwarder,
- IRecordDataParser<T> dataParser, IRecordReader<? extends T> recordReader,
- int numOfTupleFields) {
+ IRecordDataParser<T> dataParser, IRecordReader<? extends T> recordReader, int numOfTupleFields) {
super(ctx, tupleForwarder);
this.dataParser = dataParser;
this.recordReader = recordReader;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/LookupAdapter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/LookupAdapter.java
index dd713e6..aeeb04d 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/LookupAdapter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/dataset/adapter/LookupAdapter.java
@@ -57,7 +57,7 @@
public LookupAdapter(IRecordDataParser<T> dataParser, ILookupRecordReader<? extends T> recordReader,
RecordDescriptor inRecDesc, RecordIdReader ridReader, boolean propagateInput, boolean retainNull,
IMissingWriterFactory iNullWriterFactory, IHyracksTaskContext ctx, IFrameWriter writer)
- throws HyracksDataException {
+ throws HyracksDataException {
this.dataParser = dataParser;
this.recordReader = recordReader;
this.propagateInput = propagateInput;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
index df5a3c4..b89c10b 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
@@ -77,11 +77,9 @@
IFrameWriter writer, FeedPolicyAccessor fpa, FrameTupleAccessor fta, ConcurrentFramePool framePool)
throws HyracksDataException {
this.writer = writer;
- this.spiller = fpa.spillToDiskOnCongestion() ?
- new FrameSpiller(ctx,
- connectionId.getFeedId() + "_" + connectionId.getDatasetName() + "_" + runtimeId.getPartition(),
- fpa.getMaxSpillOnDisk()) :
- null;
+ this.spiller = fpa.spillToDiskOnCongestion() ? new FrameSpiller(ctx,
+ connectionId.getFeedId() + "_" + connectionId.getDatasetName() + "_" + runtimeId.getPartition(),
+ fpa.getMaxSpillOnDisk()) : null;
this.exceptionHandler = new FeedExceptionHandler(ctx, fta);
this.fpa = fpa;
this.framePool = framePool;
@@ -289,8 +287,8 @@
while (spiller.usedBudget() > MAX_SPILL_USED_BEFORE_RESUME) {
if (DEBUG) {
LOGGER.info("in stall(frame). Spilling has been consumed. We will wait for it to be less than "
- + MAX_SPILL_USED_BEFORE_RESUME + " consumed. Current consumption = " + spiller
- .usedBudget());
+ + MAX_SPILL_USED_BEFORE_RESUME + " consumed. Current consumption = "
+ + spiller.usedBudget());
}
spiller.wait();
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/generator/DataGenerator.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/generator/DataGenerator.java
index 9bc3037..a0feac5 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/generator/DataGenerator.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/generator/DataGenerator.java
@@ -177,12 +177,11 @@
: (date.getMonth() + random.nextInt(endDate.getMonth() - date.getMonth()))
: random.nextInt(12) + 1;
- int day = (year == endDate.getYear())
- ? month == endDate.getMonth()
+ int day =
+ (year == endDate.getYear()) ? month == endDate.getMonth()
? date.getDay() == endDate.getDay() ? endDate.getDay()
: date.getDay() + random.nextInt(endDate.getDay() - date.getDay())
- : random.nextInt(28) + 1
- : random.nextInt(28) + 1;
+ : random.nextInt(28) + 1 : random.nextInt(28) + 1;
recentDate.reset(month, day, year);
return recentDate;
}
@@ -495,9 +494,9 @@
public static class TweetMessage {
- private static final String[] DEFAULT_FIELDS = new String[] { TweetFields.TWEETID, TweetFields.USER,
- TweetFields.LATITUDE, TweetFields.LONGITUDE, TweetFields.MESSAGE_TEXT, TweetFields.CREATED_AT,
- TweetFields.COUNTRY };
+ private static final String[] DEFAULT_FIELDS =
+ new String[] { TweetFields.TWEETID, TweetFields.USER, TweetFields.LATITUDE, TweetFields.LONGITUDE,
+ TweetFields.MESSAGE_TEXT, TweetFields.CREATED_AT, TweetFields.COUNTRY };
private int id;
private TwitterUser user;
@@ -1183,14 +1182,13 @@
"Hallam", "Delores", "Cressida", "Carlyle", "Leann", "Kelcey", "Laurence", "Ryan", "Reynold", "Mark",
"Collyn", "Audie", "Sammy", "Ellery", "Sallie", "Pamelia", "Adolph", "Lydia", "Titania", "Ron", "Bridger",
"Aline", "Read", "Kelleigh", "Weldon", "Irving", "Garey", "Diggory", "Evander", "Kylee", "Deidre", "Ormond",
- "Laurine", "Reannon", "Arline", "Pat"};
+ "Laurine", "Reannon", "Arline", "Pat" };
public static final String[] jargon = { "wireless", "signal", "network", "3G", "plan", "touch-screen",
"customer-service", "reachability", "voice-command", "shortcut-menu", "customization", "platform", "speed",
"voice-clarity", "voicemail-service" };
- public static final String[] vendors = { "at&t", "verizon", "t-mobile", "sprint", "motorola", "samsung",
- "iphone" };
+ public static final String[] vendors = { "at&t", "verizon", "t-mobile", "sprint", "motorola", "samsung", "iphone" };
public static final String[] org_list = { "Latsonity", "ganjalax", "Zuncan", "Lexitechno", "Hot-tech", "subtam",
"Coneflex", "Ganjatax", "physcane", "Tranzap", "Qvohouse", "Zununoing", "jaydax", "Keytech", "goldendexon",
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/generator/TweetGenerator.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/generator/TweetGenerator.java
index 17056a0..cb64604 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/generator/TweetGenerator.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/generator/TweetGenerator.java
@@ -99,7 +99,7 @@
outputBuffer.limit(32 * 1024);
}
- public boolean generateNextBatch(int numTweets) throws IOException{
+ public boolean generateNextBatch(int numTweets) throws IOException {
boolean moreData = tweetIterator.hasNext();
if (!moreData) {
if (outputBuffer.position() > 0) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/FileOffsetIndexer.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/FileOffsetIndexer.java
index a15ec6e..a5bff74 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/FileOffsetIndexer.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/FileOffsetIndexer.java
@@ -42,11 +42,11 @@
protected RecordReader<?, ? extends Writable> recordReader;
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<IAObject> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ private ISerializerDeserializer<IAObject> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<IAObject> longSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<IAObject> longSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@Override
public void reset(IIndexingDatasource dataSource) throws HyracksDataException {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/IndexingScheduler.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/IndexingScheduler.java
index 353e3ef..a788a29 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/IndexingScheduler.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/IndexingScheduler.java
@@ -157,10 +157,10 @@
*/
private void scheduleNonLocalSlots(InputSplit[] splits, final int[] workloads, String[] locations, int slotLimit,
boolean[] scheduled, final HashMap<String, Integer> locationToNumOfAssignement)
- throws IOException, UnknownHostException {
+ throws IOException, UnknownHostException {
- PriorityQueue<String> scheduleCadndiates = new PriorityQueue<String>(NCs.length,
- Comparator.comparing(locationToNumOfAssignement::get));
+ PriorityQueue<String> scheduleCadndiates =
+ new PriorityQueue<String>(NCs.length, Comparator.comparing(locationToNumOfAssignement::get));
scheduleCadndiates.addAll(Arrays.asList(NCs));
/*
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordColumnarIndexer.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordColumnarIndexer.java
index 273901c..e6146d4 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordColumnarIndexer.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordColumnarIndexer.java
@@ -45,11 +45,11 @@
protected RecordReader<?, Writable> recordReader;
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<IAObject> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ private ISerializerDeserializer<IAObject> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<IAObject> longSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<IAObject> longSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@Override
public void reset(IIndexingDatasource reader) throws HyracksDataException {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordIdReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordIdReader.java
index 5de2d9d..3cca7e1 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordIdReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/indexing/RecordIdReader.java
@@ -57,8 +57,8 @@
public RecordId read(int index) throws HyracksDataException {
tupleStartOffset = tupleAccessor.getTupleStartOffset(index) + fieldSlotsLength;
- int fileNumberStartOffset = tupleAccessor.getFieldStartOffset(index,
- ridFields[IndexingConstants.FILE_NUMBER_FIELD_INDEX]);
+ int fileNumberStartOffset =
+ tupleAccessor.getFieldStartOffset(index, ridFields[IndexingConstants.FILE_NUMBER_FIELD_INDEX]);
frameBuffer = tupleAccessor.getBuffer();
if (frameBuffer.get(tupleStartOffset + fileNumberStartOffset) == MISSING_BYTE) {
return null;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithMetadataAndPK.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithMetadataAndPK.java
index ebf1d17..f9bc8b2 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithMetadataAndPK.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/RecordWithMetadataAndPK.java
@@ -57,24 +57,24 @@
// Serializers
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ADouble> doubleSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
+ private final ISerializerDeserializer<ADouble> doubleSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);
private final AMutableDouble mutableDouble = new AMutableDouble(0);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
+ private final ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
private final AMutableString mutableString = new AMutableString(null);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AInt32> int32Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ private final ISerializerDeserializer<AInt32> int32Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
private final AMutableInt32 mutableInt = new AMutableInt32(0);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ protected ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private final AMutableInt64 mutableLong = new AMutableInt64(0);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ABoolean> booleanSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ private final ISerializerDeserializer<ABoolean> booleanSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
private final int[] keyIndicator;
public RecordWithMetadataAndPK(final IRawRecord<T> record, final IAType[] metaTypes, final ARecordType recordType,
@@ -145,8 +145,8 @@
throws IOException {
if (length == 0) {
if (!NonTaggedFormatUtil.isOptional(metaTypes[index])) {
- throw new RuntimeDataException(
- ErrorCode.INPUT_RECORD_RECORD_WITH_METADATA_AND_PK_NULL_IN_NON_OPTIONAL, index);
+ throw new RuntimeDataException(ErrorCode.INPUT_RECORD_RECORD_WITH_METADATA_AND_PK_NULL_IN_NON_OPTIONAL,
+ index);
}
fieldValueBufferOutputs[index].writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
} else {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/DCPMessageToRecordConverter.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/DCPMessageToRecordConverter.java
index f948c1c..e90b168 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/DCPMessageToRecordConverter.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/converter/DCPMessageToRecordConverter.java
@@ -56,8 +56,8 @@
public DCPMessageToRecordConverter() {
this.value = new CharArrayRecord();
- this.recordWithMetadata = new RecordWithMetadataAndPK<>(value, CB_META_TYPES,
- RecordUtil.FULLY_OPEN_RECORD_TYPE, PK_INDICATOR, PK_INDEXES, PK_TYPES);
+ this.recordWithMetadata = new RecordWithMetadataAndPK<>(value, CB_META_TYPES, RecordUtil.FULLY_OPEN_RECORD_TYPE,
+ PK_INDICATOR, PK_INDEXES, PK_TYPES);
}
@Override
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/AbstractCharRecordLookupReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/AbstractCharRecordLookupReader.java
index f9ec114..6c7c42e 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/AbstractCharRecordLookupReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/AbstractCharRecordLookupReader.java
@@ -64,7 +64,7 @@
reusableByteBuffer.clear();
if (reusableByteBuffer.remaining() < value.getLength()) {
reusableByteBuffer = ByteBuffer
- .allocateDirect((int)(value.getLength() * ExternalDataConstants.DEFAULT_BUFFER_INCREMENT_FACTOR));
+ .allocateDirect((int) (value.getLength() * ExternalDataConstants.DEFAULT_BUFFER_INCREMENT_FACTOR));
}
reusableByteBuffer.put(value.getBytes(), 0, value.getLength());
reusableByteBuffer.flip();
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
index b847b23..1f97300 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
@@ -106,7 +106,7 @@
this.modified = modified;
}
- private SyndEntry getNextRSSFeed() throws IOException, FeedException, FetcherException {
+ private SyndEntry getNextRSSFeed() throws IOException, FeedException, FetcherException {
if (rssFeedBuffer.isEmpty()) {
fetchFeed();
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
index e64c79e..07b6250 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
@@ -29,8 +29,8 @@
public class EmptyLineSeparatedRecordReader extends StreamRecordReader {
- private static final List<String> recordReaderFormats = Collections
- .unmodifiableList(Arrays.asList(ExternalDataConstants.FORMAT_LINE_SEPARATED));
+ private static final List<String> recordReaderFormats =
+ Collections.unmodifiableList(Arrays.asList(ExternalDataConstants.FORMAT_LINE_SEPARATED));
private static final String REQUIRED_CONFIGS = "";
protected Map<String, String> config;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
index e1e1f08..cc4b7f9 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
@@ -39,9 +39,9 @@
private char recordStart;
private char recordEnd;
private int recordNumber = 0;
- private static final List<String> recordReaderFormats = Collections
- .unmodifiableList(Arrays.asList(ExternalDataConstants.FORMAT_ADM, ExternalDataConstants.FORMAT_JSON,
- ExternalDataConstants.FORMAT_SEMISTRUCTURED));
+ private static final List<String> recordReaderFormats =
+ Collections.unmodifiableList(Arrays.asList(ExternalDataConstants.FORMAT_ADM,
+ ExternalDataConstants.FORMAT_JSON, ExternalDataConstants.FORMAT_SEMISTRUCTURED));
private static final String REQUIRED_CONFIGS = "";
@Override
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
index 0591775..776aa0c 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/StreamRecordReaderFactory.java
@@ -94,8 +94,8 @@
public IRecordReader<? extends char[]> createRecordReader(IHyracksTaskContext ctx, int partition)
throws HyracksDataException {
try {
- StreamRecordReader streamRecordReader = (StreamRecordReader) recordReaderClazz.getConstructor()
- .newInstance();
+ StreamRecordReader streamRecordReader =
+ (StreamRecordReader) recordReaderClazz.getConstructor().newInstance();
streamRecordReader.configure(streamFactory.createInputStream(ctx, partition), configuration);
return streamRecordReader;
} catch (InstantiationException | IllegalAccessException | InvocationTargetException
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStream.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStream.java
index 0b9c4a2..49d851d 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStream.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/TwitterFirehoseInputStream.java
@@ -40,8 +40,7 @@
private final DataProvider dataProvider;
private boolean started;
- public TwitterFirehoseInputStream(Map<String, String> configuration, int partition)
- throws IOException {
+ public TwitterFirehoseInputStream(Map<String, String> configuration, int partition) throws IOException {
executorService = Executors.newCachedThreadPool();
outputStream = new PipedOutputStream();
inputStream = new PipedInputStream(outputStream);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
index c644413..63028ac 100755
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunction.java
@@ -56,8 +56,7 @@
protected final JavaFunctionHelper functionHelper;
public ExternalFunction(IExternalFunctionInfo finfo, IScalarEvaluatorFactory args[], IHyracksTaskContext context,
- IApplicationContext appCtx)
- throws HyracksDataException {
+ IApplicationContext appCtx) throws HyracksDataException {
this.finfo = finfo;
this.evaluatorFactories = args;
argumentEvaluators = new IScalarEvaluator[args.length];
@@ -78,8 +77,7 @@
externalFunctionFactory = (IFunctionFactory) clazz.newInstance();
externalFunction = externalFunctionFactory.getExternalFunction();
} catch (Exception e) {
- throw new RuntimeDataException(ErrorCode.LIBRARY_EXTERNAL_FUNCTION_UNABLE_TO_LOAD_CLASS, e,
- classname);
+ throw new RuntimeDataException(ErrorCode.LIBRARY_EXTERNAL_FUNCTION_UNABLE_TO_LOAD_CLASS, e, classname);
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/JTypeObjectFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/JTypeObjectFactory.java
index 916e668..bc53f0b 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/JTypeObjectFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/JTypeObjectFactory.java
@@ -47,7 +47,6 @@
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.util.container.IObjectFactory;
-
public class JTypeObjectFactory implements IObjectFactory<IJObject, IAType> {
public static final JTypeObjectFactory INSTANCE = new JTypeObjectFactory();
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/JavaFunctionHelper.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/JavaFunctionHelper.java
index a369888..617bbbc 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/JavaFunctionHelper.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/JavaFunctionHelper.java
@@ -52,8 +52,8 @@
private final IDataOutputProvider outputProvider;
private final IJObject[] arguments;
private IJObject resultHolder;
- private final IObjectPool<IJObject, IAType> objectPool = new ListObjectPool<IJObject, IAType>(
- JTypeObjectFactory.INSTANCE);
+ private final IObjectPool<IJObject, IAType> objectPool =
+ new ListObjectPool<IJObject, IAType>(JTypeObjectFactory.INSTANCE);
private final JObjectPointableVisitor pointableVisitor;
private final PointableAllocator pointableAllocator;
private final Map<Integer, TypeInfo> poolTypeInfo;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectPointableVisitor.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectPointableVisitor.java
index dc8e461..960b346 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectPointableVisitor.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/JObjectPointableVisitor.java
@@ -41,8 +41,10 @@
public class JObjectPointableVisitor implements IVisitablePointableVisitor<IJObject, TypeInfo> {
private final Map<ATypeTag, IJObjectAccessor> flatJObjectAccessors = new HashMap<ATypeTag, IJObjectAccessor>();
- private final Map<IVisitablePointable, IJRecordAccessor> raccessorToJObject = new HashMap<IVisitablePointable, IJRecordAccessor>();
- private final Map<IVisitablePointable, IJListAccessor> laccessorToPrinter = new HashMap<IVisitablePointable, IJListAccessor>();
+ private final Map<IVisitablePointable, IJRecordAccessor> raccessorToJObject =
+ new HashMap<IVisitablePointable, IJRecordAccessor>();
+ private final Map<IVisitablePointable, IJListAccessor> laccessorToPrinter =
+ new HashMap<IVisitablePointable, IJListAccessor>();
@Override
public IJObject visit(AListVisitablePointable accessor, TypeInfo arg) throws HyracksDataException {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
index d964788..c0ccf11 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
@@ -65,12 +65,12 @@
private final ArrayBackedValueStorage castBuffer = new ArrayBackedValueStorage();
- private final IObjectPool<IARecordBuilder, ATypeTag> recordBuilderPool = new ListObjectPool<IARecordBuilder, ATypeTag>(
- new RecordBuilderFactory());
- private final IObjectPool<IAsterixListBuilder, ATypeTag> listBuilderPool = new ListObjectPool<IAsterixListBuilder, ATypeTag>(
- new ListBuilderFactory());
- private final IObjectPool<IMutableValueStorage, ATypeTag> abvsBuilderPool = new ListObjectPool<IMutableValueStorage, ATypeTag>(
- new AbvsBuilderFactory());
+ private final IObjectPool<IARecordBuilder, ATypeTag> recordBuilderPool =
+ new ListObjectPool<IARecordBuilder, ATypeTag>(new RecordBuilderFactory());
+ private final IObjectPool<IAsterixListBuilder, ATypeTag> listBuilderPool =
+ new ListObjectPool<IAsterixListBuilder, ATypeTag>(new ListBuilderFactory());
+ private final IObjectPool<IMutableValueStorage, ATypeTag> abvsBuilderPool =
+ new ListObjectPool<IMutableValueStorage, ATypeTag>(new AbvsBuilderFactory());
private final String mismatchErrorMessage = "Mismatch Type, expecting a value of type ";
private final String mismatchErrorMessage2 = " got a value of type ";
@@ -198,8 +198,7 @@
if (checkType(ATypeTag.BOOLEAN, objectType)) {
booleanSerde.serialize(ABoolean.TRUE, out);
} else {
- throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH,
- objectType.getTypeName());
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH, objectType.getTypeName());
}
break;
case AdmLexer.TOKEN_BOOLEAN_CONS:
@@ -209,8 +208,7 @@
if (checkType(ATypeTag.BOOLEAN, objectType)) {
booleanSerde.serialize(ABoolean.FALSE, out);
} else {
- throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH,
- objectType.getTypeName());
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH, objectType.getTypeName());
}
break;
case AdmLexer.TOKEN_DOUBLE_LITERAL:
@@ -256,19 +254,18 @@
break;
case AdmLexer.TOKEN_STRING_LITERAL:
if (checkType(ATypeTag.STRING, objectType)) {
- String tokenImage = admLexer.getLastTokenImage().substring(1,
- admLexer.getLastTokenImage().length() - 1);
+ String tokenImage =
+ admLexer.getLastTokenImage().substring(1, admLexer.getLastTokenImage().length() - 1);
aString.setValue(admLexer.containsEscapes() ? replaceEscapes(tokenImage) : tokenImage);
stringSerde.serialize(aString, out);
} else if (checkType(ATypeTag.UUID, objectType)) {
// Dealing with UUID type that is represented by a string
- String tokenImage = admLexer.getLastTokenImage().substring(1,
- admLexer.getLastTokenImage().length() - 1);
+ String tokenImage =
+ admLexer.getLastTokenImage().substring(1, admLexer.getLastTokenImage().length() - 1);
aUUID.parseUUIDString(tokenImage);
uuidSerde.serialize(aUUID, out);
} else {
- throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH,
- objectType.getTypeName());
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH, objectType.getTypeName());
}
break;
case AdmLexer.TOKEN_STRING_CONS:
@@ -286,8 +283,7 @@
}
}
}
- throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH,
- objectType.getTypeName());
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH, objectType.getTypeName());
case AdmLexer.TOKEN_DATE_CONS:
parseConstructor(ATypeTag.DATE, objectType, out);
break;
@@ -302,8 +298,7 @@
objectType = getComplexType(objectType, ATypeTag.INTERVAL);
parseInterval(ATypeTag.INTERVAL, objectType, out);
} else {
- throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH,
- objectType.getTypeName());
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH, objectType.getTypeName());
}
break;
case AdmLexer.TOKEN_DURATION_CONS:
@@ -338,8 +333,7 @@
objectType = getComplexType(objectType, ATypeTag.MULTISET);
parseUnorderedList((AUnorderedListType) objectType, out);
} else {
- throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH,
- objectType.getTypeName());
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH, objectType.getTypeName());
}
break;
case AdmLexer.TOKEN_START_ORDERED_LIST:
@@ -347,8 +341,7 @@
objectType = getComplexType(objectType, ATypeTag.ARRAY);
parseOrderedList((AOrderedListType) objectType, out);
} else {
- throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH,
- objectType.getTypeName());
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH, objectType.getTypeName());
}
break;
case AdmLexer.TOKEN_START_RECORD:
@@ -356,8 +349,7 @@
objectType = getComplexType(objectType, ATypeTag.OBJECT);
parseRecord((ARecordType) objectType, out);
} else {
- throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH,
- objectType.getTypeName());
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_TYPE_MISMATCH, objectType.getTypeName());
}
break;
case AdmLexer.TOKEN_UUID_CONS:
@@ -407,8 +399,7 @@
readpos += 4;
break;
default:
- throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_ILLEGAL_ESCAPE,
- chars[readpos + 1]);
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_ILLEGAL_ESCAPE, chars[readpos + 1]);
}
++readpos;
movemarker = readpos + 1;
@@ -517,12 +508,12 @@
expectingRecordField = false;
if (recType != null) {
- String fldName = admLexer.getLastTokenImage().substring(1,
- admLexer.getLastTokenImage().length() - 1);
+ String fldName =
+ admLexer.getLastTokenImage().substring(1, admLexer.getLastTokenImage().length() - 1);
fieldId = recBuilder.getFieldId(fldName);
if ((fieldId < 0) && !recType.isOpen()) {
- throw new ParseException(
- ErrorCode.PARSER_ADM_DATA_PARSER_EXTRA_FIELD_IN_CLOSED_RECORD, fldName);
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_EXTRA_FIELD_IN_CLOSED_RECORD,
+ fldName);
} else if ((fieldId < 0) && recType.isOpen()) {
aStringFieldName.setValue(admLexer.getLastTokenImage().substring(1,
admLexer.getLastTokenImage().length() - 1));
@@ -545,8 +536,7 @@
token = admLexer.next();
if (token != AdmLexer.TOKEN_COLON) {
- throw new ParseException(
- ErrorCode.PARSER_ADM_DATA_PARSER_UNEXPECTED_TOKEN_WHEN_EXPECT_COMMA,
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_UNEXPECTED_TOKEN_WHEN_EXPECT_COMMA,
AdmLexer.tokenKindToString(token));
}
@@ -561,12 +551,10 @@
break;
case AdmLexer.TOKEN_COMMA:
if (first) {
- throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_FOUND_COMMA_WHEN,
- "before any");
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_FOUND_COMMA_WHEN, "before any");
}
if (expectingRecordField) {
- throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_FOUND_COMMA_WHEN,
- "expecting a");
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_FOUND_COMMA_WHEN, "expecting a");
}
expectingRecordField = true;
break;
@@ -675,8 +663,8 @@
String arg = admLexer.getLastTokenImage();
switch (tag) {
case DATE:
- chrononTimeInMs += (parseDatePart(arg, 0, arg.length() - 1)
- / GregorianCalendarSystem.CHRONON_OF_DAY);
+ chrononTimeInMs +=
+ (parseDatePart(arg, 0, arg.length() - 1) / GregorianCalendarSystem.CHRONON_OF_DAY);
break;
case TIME:
chrononTimeInMs += parseTimePart(arg, 0, arg.length() - 1);
@@ -687,8 +675,8 @@
throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_INTERVAL_INVALID_DATETIME);
}
chrononTimeInMs += parseDatePart(arg, 0, timeSeperatorOffsetInDatetimeString - 1);
- chrononTimeInMs += parseTimePart(arg, timeSeperatorOffsetInDatetimeString + 1,
- arg.length() - 1);
+ chrononTimeInMs +=
+ parseTimePart(arg, timeSeperatorOffsetInDatetimeString + 1, arg.length() - 1);
break;
default:
throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_INTERVAL_UNSUPPORTED_TYPE);
@@ -874,18 +862,17 @@
if (token == AdmLexer.TOKEN_CONSTRUCTOR_OPEN) {
token = admLexer.next();
if (token == AdmLexer.TOKEN_STRING_LITERAL) {
- String unquoted = admLexer.getLastTokenImage().substring(1,
- admLexer.getLastTokenImage().length() - 1);
+ String unquoted =
+ admLexer.getLastTokenImage().substring(1, admLexer.getLastTokenImage().length() - 1);
if (!parseValue(unquoted, typeTag, dataOutput)) {
- throw new ParseException(
- ErrorCode.PARSER_ADM_DATA_PARSER_CONSTRUCTOR_MISSING_DESERIALIZER,
+ throw new ParseException(ErrorCode.PARSER_ADM_DATA_PARSER_CONSTRUCTOR_MISSING_DESERIALIZER,
AdmLexer.tokenKindToString(token));
}
token = admLexer.next();
if (token == AdmLexer.TOKEN_CONSTRUCTOR_CLOSE) {
if (targetTypeTag != typeTag) {
- ITypeConvertComputer promoteComputer = ATypeHierarchy.getTypePromoteComputer(typeTag,
- targetTypeTag);
+ ITypeConvertComputer promoteComputer =
+ ATypeHierarchy.getTypePromoteComputer(typeTag, targetTypeTag);
// the availability if the promote computer should be consistent with
// the availability of a target type
assert promoteComputer != null;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/AbstractDataParser.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/AbstractDataParser.java
index 1cbe364..8351931 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/AbstractDataParser.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/AbstractDataParser.java
@@ -110,35 +110,35 @@
// Serializers
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ADouble> doubleSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
+ protected ISerializerDeserializer<ADouble> doubleSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
- .getAStringSerializerDeserializer();
+ protected ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getAStringSerializerDeserializer();
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ABinary> binarySerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABINARY);
+ protected ISerializerDeserializer<ABinary> binarySerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABINARY);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AFloat> floatSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AFLOAT);
+ protected ISerializerDeserializer<AFloat> floatSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AFLOAT);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt8> int8Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT8);
+ protected ISerializerDeserializer<AInt8> int8Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT8);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt16> int16Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT16);
+ protected ISerializerDeserializer<AInt16> int16Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT16);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt32> int32Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ protected ISerializerDeserializer<AInt32> int32Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ protected ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ABoolean> booleanSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ protected ISerializerDeserializer<ABoolean> booleanSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ANull> nullSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
+ protected ISerializerDeserializer<ANull> nullSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);
protected final HexParser hexParser = new HexParser();
protected final Base64Parser base64Parser = new Base64Parser();
@@ -147,23 +147,23 @@
// (xxxxxxxx-xxxx-xxxx-xxxxxxxxxxxx) when parsing the data.
// Thus, we need to call UUID.fromStringToAMutableUUID() to convert it to the internal representation (byte []).
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AUUID> uuidSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AUUID);
+ protected ISerializerDeserializer<AUUID> uuidSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AUUID);
// To avoid race conditions, the serdes for temporal and spatial data types needs to be one per parser
// ^^^^^^^^^^^^^^^^^^^^^^^^ ??? then why all these serdes are static?
@SuppressWarnings("unchecked")
- protected static final ISerializerDeserializer<ATime> timeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ATIME);
+ protected static final ISerializerDeserializer<ATime> timeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
@SuppressWarnings("unchecked")
- protected static final ISerializerDeserializer<ADate> dateSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATE);
+ protected static final ISerializerDeserializer<ADate> dateSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATE);
@SuppressWarnings("unchecked")
- protected static final ISerializerDeserializer<ADateTime> datetimeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATETIME);
+ protected static final ISerializerDeserializer<ADateTime> datetimeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
@SuppressWarnings("unchecked")
- protected static final ISerializerDeserializer<ADuration> durationSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADURATION);
+ protected static final ISerializerDeserializer<ADuration> durationSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADURATION);
@SuppressWarnings("unchecked")
protected static final ISerializerDeserializer<ADayTimeDuration> dayTimeDurationSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADAYTIMEDURATION);
@@ -171,23 +171,23 @@
protected static final ISerializerDeserializer<AYearMonthDuration> yearMonthDurationSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AYEARMONTHDURATION);
@SuppressWarnings("unchecked")
- protected final static ISerializerDeserializer<APoint> pointSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.APOINT);
+ protected final static ISerializerDeserializer<APoint> pointSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.APOINT);
@SuppressWarnings("unchecked")
- protected final static ISerializerDeserializer<APoint3D> point3DSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.APOINT3D);
+ protected final static ISerializerDeserializer<APoint3D> point3DSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.APOINT3D);
@SuppressWarnings("unchecked")
- protected final static ISerializerDeserializer<ACircle> circleSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ACIRCLE);
+ protected final static ISerializerDeserializer<ACircle> circleSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ACIRCLE);
@SuppressWarnings("unchecked")
- protected final static ISerializerDeserializer<ARectangle> rectangleSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ARECTANGLE);
+ protected final static ISerializerDeserializer<ARectangle> rectangleSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ARECTANGLE);
@SuppressWarnings("unchecked")
- protected final static ISerializerDeserializer<ALine> lineSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ALINE);
+ protected final static ISerializerDeserializer<ALine> lineSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ALINE);
@SuppressWarnings("unchecked")
- protected static final ISerializerDeserializer<AInterval> intervalSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINTERVAL);
+ protected static final ISerializerDeserializer<AInterval> intervalSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINTERVAL);
protected String filename;
@@ -224,8 +224,8 @@
}
}
long chrononTimeInMs = ADateParserFactory.parseDatePart(datetime, 0, timeOffset);
- chrononTimeInMs += ATimeParserFactory.parseTimePart(datetime, timeOffset + 1,
- datetime.length() - timeOffset - 1);
+ chrononTimeInMs +=
+ ATimeParserFactory.parseTimePart(datetime, timeOffset + 1, datetime.length() - timeOffset - 1);
aDateTime.setValue(chrononTimeInMs);
datetimeSerde.serialize(aDateTime, out);
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
index 0df1412..a7dbc30 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/DelimitedDataParser.java
@@ -140,8 +140,8 @@
// NULL. Note that string type can also process empty field as an
// empty string
if (!NonTaggedFormatUtil.isOptional(recordType.getFieldTypes()[i])) {
- throw new RuntimeDataException(ErrorCode.PARSER_DELIMITED_NONOPTIONAL_NULL,
- cursor.recordCount, cursor.fieldCount);
+ throw new RuntimeDataException(ErrorCode.PARSER_DELIMITED_NONOPTIONAL_NULL, cursor.recordCount,
+ cursor.fieldCount);
}
fieldValueBufferOutput.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
} else {
@@ -152,8 +152,8 @@
cursor.fEnd -= cursor.doubleQuoteCount;
cursor.isDoubleQuoteIncludedInThisField = false;
}
- valueParsers[i]
- .parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart, fieldValueBufferOutput);
+ valueParsers[i].parse(cursor.buffer, cursor.fStart, cursor.fEnd - cursor.fStart,
+ fieldValueBufferOutput);
areAllNullFields = false;
}
if (fldIds[i] < 0) {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java
index ed33401..c92ee12 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/RecordWithMetadataParser.java
@@ -49,8 +49,8 @@
private final ArrayBackedValueStorage[] metaFieldsNamesBuffers;
private final int numberOfMetaFields;
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
+ private final ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
public RecordWithMetadataParser(ARecordType metaType, IRecordDataParser<O> valueParser,
IRecordConverter<T, RecordWithMetadataAndPK<O>> converter) throws HyracksDataException {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java
index be49863..e1c961a 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/TweetParser.java
@@ -48,12 +48,12 @@
import java.util.Iterator;
public class TweetParser extends AbstractDataParser implements IRecordDataParser<String> {
- private final IObjectPool<IARecordBuilder, ATypeTag> recordBuilderPool = new ListObjectPool<>(
- new RecordBuilderFactory());
- private final IObjectPool<IAsterixListBuilder, ATypeTag> listBuilderPool = new ListObjectPool<>(
- new ListBuilderFactory());
- private final IObjectPool<IMutableValueStorage, ATypeTag> abvsBuilderPool = new ListObjectPool<>(
- new AbvsBuilderFactory());
+ private final IObjectPool<IARecordBuilder, ATypeTag> recordBuilderPool =
+ new ListObjectPool<>(new RecordBuilderFactory());
+ private final IObjectPool<IAsterixListBuilder, ATypeTag> listBuilderPool =
+ new ListObjectPool<>(new ListBuilderFactory());
+ private final IObjectPool<IMutableValueStorage, ATypeTag> abvsBuilderPool =
+ new ListObjectPool<>(new AbvsBuilderFactory());
private ARecordType recordType;
private UTF8StringWriter utf8Writer = new UTF8StringWriter();
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java
index bba8ccc..f406729 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/DelimitedDataParserFactory.java
@@ -39,8 +39,8 @@
public class DelimitedDataParserFactory extends AbstractRecordStreamParserFactory<char[]> {
private static final long serialVersionUID = 1L;
- private static final List<String> parserFormats = Collections
- .unmodifiableList(Arrays.asList("csv", "delimited-text"));
+ private static final List<String> parserFormats =
+ Collections.unmodifiableList(Arrays.asList("csv", "delimited-text"));
@Override
public IRecordDataParser<char[]> createRecordParser(IHyracksTaskContext ctx) throws HyracksDataException {
@@ -73,8 +73,8 @@
if (delimiterValue == null) {
delimiterValue = ExternalDataConstants.DEFAULT_DELIMITER;
} else if (delimiterValue.length() != 1) {
- throw new RuntimeDataException(
- ErrorCode.PARSER_FACTORY_DELIMITED_DATA_PARSER_FACTORY_NOT_VALID_DELIMITER, delimiterValue);
+ throw new RuntimeDataException(ErrorCode.PARSER_FACTORY_DELIMITED_DATA_PARSER_FACTORY_NOT_VALID_DELIMITER,
+ delimiterValue);
}
return delimiterValue.charAt(0);
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/HiveDataParserFactory.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/HiveDataParserFactory.java
index c944554..489749c 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/HiveDataParserFactory.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/parser/factory/HiveDataParserFactory.java
@@ -38,8 +38,8 @@
public class HiveDataParserFactory implements IRecordDataParserFactory<Writable> {
private static final long serialVersionUID = 1L;
- private static final List<String> parserFormats = Collections
- .unmodifiableList(Arrays.asList("hive", "hive-parser"));
+ private static final List<String> parserFormats =
+ Collections.unmodifiableList(Arrays.asList("hive", "hive-parser"));
private Map<String, String> configuration;
private ARecordType recordType;
private String hiveSerdeClassName;
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
index d6ac5d1..5740143 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
@@ -68,8 +68,8 @@
Map<String, String> configuration, ARecordType recordType, int[] ridFields, boolean retainInput,
boolean retainMissing, IMissingWriterFactory missingWriterFactory)
throws HyracksDataException, AlgebricksException {
- LookupAdapterFactory<?> adapterFactory = new LookupAdapterFactory<>(recordType, ridFields, retainInput,
- retainMissing, missingWriterFactory);
+ LookupAdapterFactory<?> adapterFactory =
+ new LookupAdapterFactory<>(recordType, ridFields, retainInput, retainMissing, missingWriterFactory);
adapterFactory.configure(serviceCtx, configuration);
return adapterFactory;
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
index 859c9fd..c9dafbc 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
@@ -68,8 +68,8 @@
IInputStreamFactory streamSourceFactory;
if (ExternalDataUtils.isExternal(streamSource)) {
String dataverse = ExternalDataUtils.getDataverse(configuration);
- streamSourceFactory = ExternalDataUtils.createExternalInputStreamFactory(libraryManager, dataverse,
- streamSource);
+ streamSourceFactory =
+ ExternalDataUtils.createExternalInputStreamFactory(libraryManager, dataverse, streamSource);
} else {
switch (streamSource) {
case ExternalDataConstants.STREAM_LOCAL_FILESYSTEM:
@@ -87,8 +87,7 @@
streamSourceFactory = (IInputStreamFactory) Class.forName(streamSource).newInstance();
} catch (Exception e) {
throw new RuntimeDataException(
- ErrorCode.PROVIDER_DATASOURCE_FACTORY_UNKNOWN_INPUT_STREAM_FACTORY, e,
- streamSource);
+ ErrorCode.PROVIDER_DATASOURCE_FACTORY_UNKNOWN_INPUT_STREAM_FACTORY, e, streamSource);
}
}
}
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/StreamRecordReaderProvider.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/StreamRecordReaderProvider.java
index 701d0f2..4165fa8 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/StreamRecordReaderProvider.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/provider/StreamRecordReaderProvider.java
@@ -39,8 +39,8 @@
public class StreamRecordReaderProvider {
- private static final String RESOURCE = "META-INF/services/org.apache.asterix.external.input.record."
- + "reader.stream.StreamRecordReader";
+ private static final String RESOURCE =
+ "META-INF/services/org.apache.asterix.external.input.record." + "reader.stream.StreamRecordReader";
private static Map<String, List<Pair<String[], Class>>> recordReaders = null;
protected static StreamRecordReader getInstance(Class clazz) throws AsterixException {
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/LocalFileSystemUtils.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/LocalFileSystemUtils.java
index 2cb842b..23c168e 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/LocalFileSystemUtils.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/LocalFileSystemUtils.java
@@ -35,12 +35,11 @@
public class LocalFileSystemUtils {
- public static void traverse(final List<File> files, File root, final String expression,
- final LinkedList<Path> dirs) throws IOException {
+ public static void traverse(final List<File> files, File root, final String expression, final LinkedList<Path> dirs)
+ throws IOException {
final Path path = root.toPath();
if (!Files.exists(path)) {
- throw new RuntimeDataException(ErrorCode.UTIL_LOCAL_FILE_SYSTEM_UTILS_PATH_NOT_FOUND,
- path.toString());
+ throw new RuntimeDataException(ErrorCode.UTIL_LOCAL_FILE_SYSTEM_UTILS_PATH_NOT_FOUND, path.toString());
}
if (!Files.isDirectory(path)) {
validateAndAdd(path, expression, files);
diff --git a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/TwitterUtil.java b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/TwitterUtil.java
index 8cc3466..f64daf3 100644
--- a/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/TwitterUtil.java
+++ b/asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/util/TwitterUtil.java
@@ -87,7 +87,8 @@
public static double[][] getBoundingBoxes(String locationValue) throws AsterixException {
double[][] locations = null;
- String coordRegex = "^((((\\-?\\d+\\.\\d+),\\s*){3}(\\-?\\d+\\.\\d+)|\\w+);\\s*)*(((\\-?\\d+\\.\\d+),\\s*){3}(\\-?\\d+\\.\\d+)|\\w+)$";
+ String coordRegex =
+ "^((((\\-?\\d+\\.\\d+),\\s*){3}(\\-?\\d+\\.\\d+)|\\w+);\\s*)*(((\\-?\\d+\\.\\d+),\\s*){3}(\\-?\\d+\\.\\d+)|\\w+)$";
Pattern p = Pattern.compile(coordRegex);
Matcher m = p.matcher(locationValue);
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java
index 7ac637f..bc24227 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java
@@ -105,8 +105,8 @@
specialAttrNames.add(ATTR_PARENT);
}
- public static final FunctionCall curr_time_expr = FunctionCall.createFunctionCall("time",
- new ExprList(new ClassAdObjectPool()), new ClassAdObjectPool());
+ public static final FunctionCall curr_time_expr =
+ FunctionCall.createFunctionCall("time", new ExprList(new ClassAdObjectPool()), new ClassAdObjectPool());
private ClassAd alternateScope;
private final Map<CaseInsensitiveString, ExprTree> attrList;
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Operation.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Operation.java
index 73ededd..562bd49 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Operation.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Operation.java
@@ -529,8 +529,8 @@
}
// do evaluation
- sig = privateDoOperation(opKind.getIntegerValue(), val1, val2, val3, valid1, valid2, valid3, result,
- state, objectPool);
+ sig = privateDoOperation(opKind.getIntegerValue(), val1, val2, val3, valid1, valid2, valid3, result, state,
+ objectPool);
// delete trees which were not significant
if (valid1 && 0 != (sig & SigValues.SIG_CHLD1.ordinal())) {
@@ -553,8 +553,7 @@
// corresponding value is UNDEFINED or ERROR, propagate only that tree
if (isStrictOperator(opKind.getIntegerValue())) {
// strict unary operators: unary -, unary +, !, ~, ()
- if (opKind.getIntegerValue() == OpKind_UNARY_MINUS_OP
- || opKind.getIntegerValue() == OpKind_UNARY_PLUS_OP
+ if (opKind.getIntegerValue() == OpKind_UNARY_MINUS_OP || opKind.getIntegerValue() == OpKind_UNARY_PLUS_OP
|| opKind.getIntegerValue() == OpKind_LOGICAL_NOT_OP
|| opKind.getIntegerValue() == OpKind_BITWISE_NOT_OP
|| opKind.getIntegerValue() == OpKind_PARENTHESES_OP) {
@@ -588,15 +587,13 @@
}
} else {
// non-strict operators
- if (opKind.getIntegerValue() == OpKind_IS_OP
- || opKind.getIntegerValue() == OpKind_ISNT_OP) {
+ if (opKind.getIntegerValue() == OpKind_IS_OP || opKind.getIntegerValue() == OpKind_ISNT_OP) {
// the operation is *always* significant for IS and ISNT
tree.setInnerTree(createOperation(opKind.getIntegerValue(), t1, t2, objectPool));
return (true);
}
// other non-strict binary operators
- if (opKind.getIntegerValue() == OpKind_LOGICAL_AND_OP
- || opKind.getIntegerValue() == OpKind_LOGICAL_OR_OP) {
+ if (opKind.getIntegerValue() == OpKind_LOGICAL_AND_OP || opKind.getIntegerValue() == OpKind_LOGICAL_OR_OP) {
if ((SigValues.values()[sig].ordinal() & SigValues.SIG_CHLD1.ordinal()) != 0
&& (SigValues.values()[sig].ordinal() & SigValues.SIG_CHLD2.ordinal()) != 0) {
tree.setInnerTree(createOperation(opKind.getIntegerValue(), t1, t2, objectPool));
@@ -765,16 +762,15 @@
}
// cannot collapse values due to dissimilar ops
- if ((op1.getIntegerValue() != OpKind_NO_OP || op2.getIntegerValue() != OpKind_NO_OP)
- && !op.equals(op1) && !op.equals(op1)) {
+ if ((op1.getIntegerValue() != OpKind_NO_OP || op2.getIntegerValue() != OpKind_NO_OP) && !op.equals(op1)
+ && !op.equals(op1)) {
// at least one of them returned a value and a tree, and parent does
// not share the same operation with either child
ExprTreeHolder newOp1 = objectPool.mutableExprPool.get();
ExprTreeHolder newOp2 = objectPool.mutableExprPool.get();
if (op1.getIntegerValue() != OpKind_NO_OP) {
- newOp1.setInnerTree(
- Operation.createOperation(op1.getIntegerValue(), val1, tree1, objectPool));
+ newOp1.setInnerTree(Operation.createOperation(op1.getIntegerValue(), val1, tree1, objectPool));
} else if (tree1.getInnerTree() != null) {
newOp1.setInnerTree(tree1.getInnerTree());
} else {
@@ -782,8 +778,7 @@
}
if (op2.getIntegerValue() != OpKind_NO_OP) {
- newOp2.setInnerTree(
- Operation.createOperation(op2.getIntegerValue(), val2, tree2, objectPool));
+ newOp2.setInnerTree(Operation.createOperation(op2.getIntegerValue(), val2, tree2, objectPool));
} else if (tree2.getInnerTree() != null) {
newOp2.setInnerTree(tree2);
} else {
@@ -820,8 +815,7 @@
// leftson makes a tree,value contribution
if (tree2.getInnerTree() == null) {
// rightson makes a value contribution
- privateDoOperation(op.getIntegerValue(), val1, val2, dummy, true, true, false, val,
- objectPool);
+ privateDoOperation(op.getIntegerValue(), val1, val2, dummy, true, true, false, val, objectPool);
tree.setInnerTree(tree1);
return true;
} else {
@@ -840,8 +834,7 @@
// rightson makes a tree,value contribution
if (tree1.getInnerTree() == null) {
// leftson makes a value contribution
- privateDoOperation(op.getIntegerValue(), val1, val2, dummy, true, true, false, val,
- objectPool);
+ privateDoOperation(op.getIntegerValue(), val1, val2, dummy, true, true, false, val, objectPool);
tree.setInnerTree(tree2);
return true;
} else {
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java
index f661d06..a4ac726 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java
@@ -58,7 +58,7 @@
ClassAdObjectPool objectPool = new ClassAdObjectPool();
ClassAd pAd = new ClassAd(objectPool);
String szInput;
- String[] files = new String[] {"/classad/testdata.txt"};
+ String[] files = new String[] { "/classad/testdata.txt" };
BufferedReader infile = null;
for (String path : files) {
infile = Files.newBufferedReader(
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
index 3bbcf89..47f784a 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
@@ -90,8 +90,8 @@
public void testSchemaful() {
try {
File file = new File("target/classad-wtih-temporals.adm");
- File expected = new File(
- getClass().getResource("/classad/results/classad-with-temporals.adm").toURI().getPath());
+ File expected =
+ new File(getClass().getResource("/classad/results/classad-with-temporals.adm").toURI().getPath());
FileUtils.deleteQuietly(file);
PrintStream printStream = new PrintStream(Files.newOutputStream(Paths.get(file.toURI())));
String[] recordFieldNames = { "GlobalJobId", "Owner", "ClusterId", "ProcId", "RemoteWallClockTime",
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java
index eaade6c..85b866e 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java
@@ -332,7 +332,8 @@
System.out.println("Testing the ClassAd class...");
- String input_basic = "[ A = 3; B = 4.0; C = \"babyzilla\"; D = true; E = {1}; F = [ AA = 3; ]; G =\"deleteme\";]";
+ String input_basic =
+ "[ A = 3; B = 4.0; C = \"babyzilla\"; D = true; E = {1}; F = [ AA = 3; ]; G =\"deleteme\";]";
ClassAd basic = new ClassAd(objectPool);
AMutableInt64 i = new AMutableInt64(0);
MutableBoolean b = new MutableBoolean();
@@ -380,7 +381,8 @@
basic = null;
/* ----- Test GetExternalReferences ----- */
- String inputRef = "[ Rank=Member(\"LCG-2_1_0\",other.Environment) ? other.Time/seconds : other.Time/minutes; minutes=60; ]";
+ String inputRef =
+ "[ Rank=Member(\"LCG-2_1_0\",other.Environment) ? other.Time/seconds : other.Time/minutes; minutes=60; ]";
TreeSet<String> refs = new TreeSet<String>();
ExprTree rank;
@@ -431,7 +433,8 @@
// This ClassAd may cause problems. Perhaps a memory leak.
// This test is only useful when run under valgrind.
- String memoryProblemClassad = "[ Updates = [status = \"request_completed\"; timestamp = absTime(\"2004-12-16T18:10:59-0600]\")] ]";
+ String memoryProblemClassad =
+ "[ Updates = [status = \"request_completed\"; timestamp = absTime(\"2004-12-16T18:10:59-0600]\")] ]";
c = parser.parseClassAd(memoryProblemClassad);
/* ----- Test Parsing multiple ClassAds ----- */
@@ -441,11 +444,9 @@
AMutableInt32 offset = new AMutableInt32(0);
parser.parseClassAd(twoClassads, classad1, offset);
- test("Have good offset #1", offset.getIntegerValue() == 10, "Test Parsing multiple ClassAds 1",
- results);
+ test("Have good offset #1", offset.getIntegerValue() == 10, "Test Parsing multiple ClassAds 1", results);
parser.parseClassAd(twoClassads, classad2, offset);
- test("Have good offset #2", offset.getIntegerValue() == 20, "Test Parsing multiple ClassAds 2",
- results);
+ test("Have good offset #2", offset.getIntegerValue() == 20, "Test Parsing multiple ClassAds 2", results);
/* ----- Test chained ClassAds ----- */
// classad1 and classad2 from above test are used.
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/TestAsterixMembersReader.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/TestAsterixMembersReader.java
index 665d50b..8e6f346 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/TestAsterixMembersReader.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/input/record/reader/TestAsterixMembersReader.java
@@ -38,8 +38,8 @@
private int counter = 0;
private final int numOfRecords = 10;
private final StringBuilder builder = new StringBuilder();
- private static final String[] names = { "Abdullah", "Michael", "Till", "Yingyi", "Ildar", "Taewoo", "Young-Seok",
- "Murtadha", "Ian", "Steven" };
+ private static final String[] names =
+ { "Abdullah", "Michael", "Till", "Yingyi", "Ildar", "Taewoo", "Young-Seok", "Murtadha", "Ian", "Steven" };
public TestAsterixMembersReader() {
rawRecord = new CharArrayRecord();
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/CapitalFinderFunction.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/CapitalFinderFunction.java
index ecd9b13..cc32f45 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/CapitalFinderFunction.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/CapitalFinderFunction.java
@@ -52,7 +52,8 @@
@Override
public void initialize(IFunctionHelper functionHelper) throws Exception {
- InputStream in = CapitalFinderFunction.class.getClassLoader().getResourceAsStream("data/countriesCapitals.properties");
+ InputStream in =
+ CapitalFinderFunction.class.getClassLoader().getResourceAsStream("data/countriesCapitals.properties");
capitalList = new Properties();
capitalList.load(in);
capital = (JString) functionHelper.getObject(JTypeTag.STRING);
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
index 04848a5..2f9faed 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
@@ -89,14 +89,12 @@
private final ClassAdObjectPool objectPool;
// asterix objects
private ARecordType recordType;
- private IObjectPool<IARecordBuilder, ATypeTag> recordBuilderPool = new ListObjectPool<IARecordBuilder, ATypeTag>(
- new RecordBuilderFactory());
+ private IObjectPool<IARecordBuilder, ATypeTag> recordBuilderPool =
+ new ListObjectPool<IARecordBuilder, ATypeTag>(new RecordBuilderFactory());
private IObjectPool<IAsterixListBuilder, ATypeTag> listBuilderPool =
- new ListObjectPool<IAsterixListBuilder, ATypeTag>(
- new ListBuilderFactory());
+ new ListObjectPool<IAsterixListBuilder, ATypeTag>(new ListBuilderFactory());
private IObjectPool<IMutableValueStorage, ATypeTag> abvsBuilderPool =
- new ListObjectPool<IMutableValueStorage, ATypeTag>(
- new AbvsBuilderFactory());
+ new ListObjectPool<IMutableValueStorage, ATypeTag>(new AbvsBuilderFactory());
private final ClassAd rootAd;
private String exprPrefix = "expr=";
private String exprSuffix = "";
@@ -1351,7 +1349,7 @@
if (!parseArgumentList(argList)) {
tree.setInnerTree(null);
return false;
- };
+ } ;
// special case function-calls should be converted
// into a literal expression if the argument is a
// string literal
@@ -1399,7 +1397,7 @@
tree.setInnerTree(Operation.createOperation(Operation.OpKind_PARENTHESES_OP, treeL, objectPool));
return (tree.size() != 0);
}
- // constants
+ // constants
case LEX_OPEN_BOX: {
isExpr = true;
ClassAd newAd = objectPool.classAdPool.get();
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
index d822f84..90336fe 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapter.java
@@ -73,8 +73,7 @@
} else {
}
if (LOGGER.isWarnEnabled()) {
- LOGGER.warn(
- "Could not obtain input stream for parsing from adapter " + this + "[" + partition + "]");
+ LOGGER.warn("Could not obtain input stream for parsing from adapter " + this + "[" + partition + "]");
}
}
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
index 616ed6e..6fe938c 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
@@ -85,10 +85,12 @@
(IApplicationContext) ctx.getJobletContext().getServiceContext().getApplicationContext();
ClusterPartition nodePartition = appCtx.getMetadataProperties().getNodePartitions().get(nodeId)[0];
parser = new ADMDataParser(outputType, true);
- forwarder = DataflowUtils.getTupleForwarder(configuration,
- FeedUtils.getFeedLogManager(ctx,
- FeedUtils.splitsForAdapter(ExternalDataUtils.getDataverse(configuration),
- ExternalDataUtils.getFeedName(configuration), nodeId, nodePartition)));
+ forwarder =
+ DataflowUtils
+ .getTupleForwarder(configuration,
+ FeedUtils.getFeedLogManager(ctx, FeedUtils.splitsForAdapter(
+ ExternalDataUtils.getDataverse(configuration),
+ ExternalDataUtils.getFeedName(configuration), nodeId, nodePartition)));
tb = new ArrayTupleBuilder(1);
return new ITupleParser() {
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/TestRecordWithPKParser.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/TestRecordWithPKParser.java
index 21a1430..fb222e6 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/TestRecordWithPKParser.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/TestRecordWithPKParser.java
@@ -37,8 +37,8 @@
}
@Override
- public void parse(final IRawRecord<? extends RecordWithPK<T>> record, final DataOutput out) throws
- HyracksDataException {
+ public void parse(final IRawRecord<? extends RecordWithPK<T>> record, final DataOutput out)
+ throws HyracksDataException {
recordParser.parse(record.get().getRecord(), out);
}
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
index 6f36eab..f06528e 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
@@ -38,22 +38,22 @@
@Test
public void test() throws IOException {
String[] dates = { "-9537-08-04", "9656-06-03", "-9537-04-04", "9656-06-04", "-9537-10-04", "9626-09-05" };
- AMutableDate[] parsedDates = new AMutableDate[] { new AMutableDate(-4202630), new AMutableDate(2807408),
- new AMutableDate(-4202752), new AMutableDate(2807409), new AMutableDate(-4202569),
- new AMutableDate(2796544), };
+ AMutableDate[] parsedDates =
+ new AMutableDate[] { new AMutableDate(-4202630), new AMutableDate(2807408), new AMutableDate(-4202752),
+ new AMutableDate(2807409), new AMutableDate(-4202569), new AMutableDate(2796544), };
String[] times = { "12:04:45.689Z", "12:41:59.002Z", "12:10:45.169Z", "15:37:48.736Z", "04:16:42.321Z",
"12:22:56.816Z" };
- AMutableTime[] parsedTimes = new AMutableTime[] { new AMutableTime(43485689), new AMutableTime(45719002),
- new AMutableTime(43845169), new AMutableTime(56268736), new AMutableTime(15402321),
- new AMutableTime(44576816), };
+ AMutableTime[] parsedTimes =
+ new AMutableTime[] { new AMutableTime(43485689), new AMutableTime(45719002), new AMutableTime(43845169),
+ new AMutableTime(56268736), new AMutableTime(15402321), new AMutableTime(44576816), };
String[] dateTimes = { "-2640-10-11T17:32:15.675Z", "4104-02-01T05:59:11.902Z", "0534-12-08T08:20:31.487Z",
"6778-02-16T22:40:21.653Z", "2129-12-12T13:18:35.758Z", "8647-07-01T13:10:19.691Z" };
- AMutableDateTime[] parsedDateTimes = new AMutableDateTime[] { new AMutableDateTime(-145452954464325L),
- new AMutableDateTime(67345192751902L), new AMutableDateTime(-45286270768513L),
- new AMutableDateTime(151729886421653L), new AMutableDateTime(5047449515758L),
- new AMutableDateTime(210721439419691L) };
+ AMutableDateTime[] parsedDateTimes =
+ new AMutableDateTime[] { new AMutableDateTime(-145452954464325L), new AMutableDateTime(67345192751902L),
+ new AMutableDateTime(-45286270768513L), new AMutableDateTime(151729886421653L),
+ new AMutableDateTime(5047449515758L), new AMutableDateTime(210721439419691L) };
Thread[] threads = new Thread[16];
AtomicInteger errorCount = new AtomicInteger(0);
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ParserFactoryProviderLoadParserTest.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ParserFactoryProviderLoadParserTest.java
index effb7cd..26cefa8 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ParserFactoryProviderLoadParserTest.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ParserFactoryProviderLoadParserTest.java
@@ -29,10 +29,10 @@
import org.junit.Assert;
import org.junit.Test;
-
public class ParserFactoryProviderLoadParserTest {
IDataParserFactory factory;
+
@Test
public void test() throws AsterixException {
boolean result = true;
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/StreamRecordReaderProviderTest.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/StreamRecordReaderProviderTest.java
index 71a5072..3a9d7a5 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/StreamRecordReaderProviderTest.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/StreamRecordReaderProviderTest.java
@@ -32,14 +32,11 @@
public class StreamRecordReaderProviderTest {
@Test
- public void Test() throws AsterixException{
- List<String> recordReaderFormats = Arrays.asList(
- ExternalDataConstants.FORMAT_LINE_SEPARATED,
- ExternalDataConstants.FORMAT_ADM,
- ExternalDataConstants.FORMAT_JSON,
- ExternalDataConstants.FORMAT_SEMISTRUCTURED,
- ExternalDataConstants.FORMAT_DELIMITED_TEXT,
- ExternalDataConstants.FORMAT_CSV);
+ public void Test() throws AsterixException {
+ List<String> recordReaderFormats =
+ Arrays.asList(ExternalDataConstants.FORMAT_LINE_SEPARATED, ExternalDataConstants.FORMAT_ADM,
+ ExternalDataConstants.FORMAT_JSON, ExternalDataConstants.FORMAT_SEMISTRUCTURED,
+ ExternalDataConstants.FORMAT_DELIMITED_TEXT, ExternalDataConstants.FORMAT_CSV);
Map<String, String> config = new HashMap<>();
for (String format : recordReaderFormats) {
config.clear();
diff --git a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/TweetParserTest.java b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/TweetParserTest.java
index 5389310..9916fa5 100644
--- a/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/TweetParserTest.java
+++ b/asterixdb/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/TweetParserTest.java
@@ -60,8 +60,8 @@
unionTypeList.add(ANULL);
unionTypeList.add(AMISSING);
IAType geoUnionType = new AUnionType(unionTypeList, "GeoType?");
- ARecordType tweetRecordType = new ARecordType("TweetType", new String[] { "id", "geo" },
- new IAType[] { AINT64, geoUnionType }, true);
+ ARecordType tweetRecordType =
+ new ARecordType("TweetType", new String[] { "id", "geo" }, new IAType[] { AINT64, geoUnionType }, true);
TweetParser parser = new TweetParser(tweetRecordType);
@@ -86,8 +86,8 @@
// contruct type
IAType geoFieldType = new ARecordType("GeoType", new String[] { "coordinates" },
new IAType[] { new AOrderedListType(AFLOAT, "point") }, true);
- ARecordType tweetRecordType = new ARecordType("TweetType", new String[] { "id", "geo" },
- new IAType[] { AINT64, geoFieldType }, true);
+ ARecordType tweetRecordType =
+ new ARecordType("TweetType", new String[] { "id", "geo" }, new IAType[] { AINT64, geoFieldType }, true);
TweetParser parser = new TweetParser(tweetRecordType);
List<String> lines = Files.readAllLines(Paths.get(getClass().getResource("/test_tweets.txt").toURI()));
diff --git a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyFiltersJaccard.java b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyFiltersJaccard.java
index 770e9dd..5f50108 100644
--- a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyFiltersJaccard.java
+++ b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyFiltersJaccard.java
@@ -92,13 +92,13 @@
* @return
*/
public boolean passPositionFilter(int noGramsCommon, int positionX, int positionY, int lengthX, int lengthY) {
- return getIntersectUpperBound(noGramsCommon, positionX, positionY, lengthX, lengthY) >= getIntersectLowerBound(
- lengthX, lengthY);
+ return getIntersectUpperBound(noGramsCommon, positionX, positionY, lengthX,
+ lengthY) >= getIntersectLowerBound(lengthX, lengthY);
}
public boolean passPositionFilter(int noGramsCommon, long positionX, long positionY, long lengthX, long lengthY) {
- return getIntersectUpperBound(noGramsCommon, positionX, positionY, lengthX, lengthY) >= getIntersectLowerBound(
- lengthX, lengthY);
+ return getIntersectUpperBound(noGramsCommon, positionX, positionY, lengthX,
+ lengthY) >= getIntersectLowerBound(lengthX, lengthY);
}
}
diff --git a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinAppendLength.java b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinAppendLength.java
index 6778152..8be6f0c 100644
--- a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinAppendLength.java
+++ b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinAppendLength.java
@@ -45,8 +45,8 @@
String line;
while ((line = input.readLine()) != null) {
String[] splits = line.split(FuzzyJoinConfig.RECORD_SEPARATOR_REGEX);
- Collection<String> tokens = tokenizer
- .tokenize(FuzzyJoinUtil.getData(splits, dataColumns, FuzzyJoinConfig.TOKEN_SEPARATOR));
+ Collection<String> tokens =
+ tokenizer.tokenize(FuzzyJoinUtil.getData(splits, dataColumns, FuzzyJoinConfig.TOKEN_SEPARATOR));
output.write(splits[0] + FuzzyJoinConfig.RECORD_SEPARATOR + splits[1] + FuzzyJoinConfig.RECORD_SEPARATOR
+ splits[2] + FuzzyJoinConfig.RECORD_SEPARATOR + splits[3] + FuzzyJoinConfig.RECORD_SEPARATOR
+ tokens.size() + "\n");
diff --git a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinMemory.java b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinMemory.java
index e42ba51..02d3037 100644
--- a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinMemory.java
+++ b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinMemory.java
@@ -80,7 +80,7 @@
@SuppressWarnings("squid:S1166") // Either log or rethrow this exception
public static void readRecords(String fileName, List<int[]> records, List<Integer> rids) throws IOException {
try (LittleEndianIntInputStream in =
- new LittleEndianIntInputStream(new BufferedInputStream(new FileInputStream(fileName)))) {
+ new LittleEndianIntInputStream(new BufferedInputStream(new FileInputStream(fileName)))) {
while (true) {
int rid = 0;
@@ -161,9 +161,8 @@
count = -1;
}
// suffix filter
- if (count == 1
- && !similarityFilters.passSuffixFilter(tokens, indexToken, records.get(indexProbe),
- indexTokenProbe)) {
+ if (count == 1 && !similarityFilters.passSuffixFilter(tokens, indexToken,
+ records.get(indexProbe), indexTokenProbe)) {
count = -1;
}
counts.put(indexProbe, count);
@@ -268,9 +267,8 @@
count = -1;
}
// suffix filter
- if (count == 1
- && !similarityFilters.passSuffixFilter(tokens, indexToken, records.get(indexProbe),
- indexTokenProbe)) {
+ if (count == 1 && !similarityFilters.passSuffixFilter(tokens, indexToken,
+ records.get(indexProbe), indexTokenProbe)) {
count = -1;
}
counts.put(indexProbe, count);
diff --git a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java
index 64c7e52..4c85f25 100644
--- a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java
+++ b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/FuzzyJoinTokenize.java
@@ -78,8 +78,9 @@
String line;
HashMap<String, MutableInteger> tokenCount = new HashMap<String, MutableInteger>();
while ((line = input.readLine()) != null) {
- Collection<String> tokens = tokenizer.tokenize(FuzzyJoinUtil.getData(
- line.split(FuzzyJoinConfig.RECORD_SEPARATOR_REGEX), dataColumns, FuzzyJoinConfig.TOKEN_SEPARATOR));
+ Collection<String> tokens =
+ tokenizer.tokenize(FuzzyJoinUtil.getData(line.split(FuzzyJoinConfig.RECORD_SEPARATOR_REGEX),
+ dataColumns, FuzzyJoinConfig.TOKEN_SEPARATOR));
for (String token : tokens) {
MutableInteger count = tokenCount.get(token);
@@ -108,14 +109,14 @@
tokenLoad.loadTokenRank();
input = new BufferedReader(new FileReader(inputFileName));
- LittleEndianIntOutputStream outputTokenized = new LittleEndianIntOutputStream(
- new BufferedOutputStream(new FileOutputStream(tokenizedFileName)));
+ LittleEndianIntOutputStream outputTokenized =
+ new LittleEndianIntOutputStream(new BufferedOutputStream(new FileOutputStream(tokenizedFileName)));
while ((line = input.readLine()) != null) {
String splits[] = line.split(FuzzyJoinConfig.RECORD_SEPARATOR_REGEX);
int rid = Integer.parseInt(splits[FuzzyJoinConfig.RECORD_KEY]);
outputTokenized.writeInt(rid);
- Collection<String> tokens = tokenizer
- .tokenize(FuzzyJoinUtil.getData(splits, dataColumns, FuzzyJoinConfig.TOKEN_SEPARATOR));
+ Collection<String> tokens =
+ tokenizer.tokenize(FuzzyJoinUtil.getData(splits, dataColumns, FuzzyJoinConfig.TOKEN_SEPARATOR));
Collection<Integer> tokensRanked = tokenRank.getTokenRanks(tokens);
outputTokenized.writeInt(tokensRanked.size());
for (Integer token : tokensRanked) {
diff --git a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/invertedlist/InvertedList.java b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/invertedlist/InvertedList.java
index ee1e362..7a9356a 100644
--- a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/invertedlist/InvertedList.java
+++ b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/invertedlist/InvertedList.java
@@ -19,7 +19,6 @@
package org.apache.asterix.fuzzyjoin.invertedlist;
-
public interface InvertedList extends Iterable<int[]> {
public void add(int[] element);
diff --git a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/similarity/SimilarityFiltersJaccard.java b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/similarity/SimilarityFiltersJaccard.java
index f16fed9..556c0b7 100644
--- a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/similarity/SimilarityFiltersJaccard.java
+++ b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/similarity/SimilarityFiltersJaccard.java
@@ -186,8 +186,8 @@
* @return
*/
public boolean passPositionFilter(int noGramsCommon, int positionX, int lengthX, int positionY, int lengthY) {
- return getIntersectUpperBound(noGramsCommon, positionX, positionY, lengthX, lengthY) >= getIntersectLowerBound(
- lengthX, lengthY);
+ return getIntersectUpperBound(noGramsCommon, positionX, positionY, lengthX,
+ lengthY) >= getIntersectLowerBound(lengthX, lengthY);
}
public float passSimilarityFilter(final int[] tokensX, int startX, int lengthX, final int prefixLengthX,
@@ -208,9 +208,9 @@
}
} else {
if (intersectionSizePrefix + lengthProbe - prefixLengthY >= intersectSizeLowerBound) {
- intersectSize = intersectionSizePrefix
- + SimilarityMetric.getIntersectSize(tokensX, startX + intersectionSizePrefix, lengthX
- - intersectionSizePrefix, tokensY, startY + prefixLengthY, lengthY - prefixLengthY);
+ intersectSize = intersectionSizePrefix + SimilarityMetric.getIntersectSize(tokensX,
+ startX + intersectionSizePrefix, lengthX - intersectionSizePrefix, tokensY,
+ startY + prefixLengthY, lengthY - prefixLengthY);
}
}
@@ -268,8 +268,8 @@
public boolean passSuffixFilter(int[] tokensX, int tokensStartX, int tokensLengthX, int positionX, int[] tokensY,
int tokensStartY, int tokensLengthY, int positionY) {
- int hammingMax = tokensLengthX + tokensLengthY - 2
- * (int) Math.ceil(simThr100 / (100 + simThr100) * (tokensLengthX + tokensLengthY))
+ int hammingMax = tokensLengthX + tokensLengthY
+ - 2 * (int) Math.ceil(simThr100 / (100 + simThr100) * (tokensLengthX + tokensLengthY))
- (positionX + 1 + positionY + 1 - 2);
int hamming = getSuffixFilter(tokensX, tokensStartX + positionX + 1, tokensLengthX - positionX - 1, tokensY,
tokensStartY + positionY + 1, tokensLengthY - positionY - 1, hammingMax, 1);
diff --git a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/similarity/SimilarityMetricJaccard.java b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/similarity/SimilarityMetricJaccard.java
index 4a31b8b..63d3077 100644
--- a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/similarity/SimilarityMetricJaccard.java
+++ b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/similarity/SimilarityMetricJaccard.java
@@ -60,8 +60,8 @@
// apply length filter
int lengthLowerBound = (int) Math.ceil(simThresh * firstList.size());
- boolean passesLengthFilter = (lengthLowerBound <= secondList.size())
- && (secondList.size() <= 1.0f / simThresh * firstList.size());
+ boolean passesLengthFilter =
+ (lengthLowerBound <= secondList.size()) && (secondList.size() <= 1.0f / simThresh * firstList.size());
if (!passesLengthFilter) {
return -1f;
}
diff --git a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/tokenizer/TokenizerBufferedFactory.java b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/tokenizer/TokenizerBufferedFactory.java
index 164b965..2f4e8c6 100644
--- a/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/tokenizer/TokenizerBufferedFactory.java
+++ b/asterixdb/asterix-fuzzyjoin/src/main/java/org/apache/asterix/fuzzyjoin/tokenizer/TokenizerBufferedFactory.java
@@ -28,6 +28,7 @@
}
public static boolean isSeparator(char c) {
- return !(Character.isLetterOrDigit(c) || Character.getType(c) == Character.OTHER_LETTER || Character.getType(c) == Character.OTHER_NUMBER);
+ return !(Character.isLetterOrDigit(c) || Character.getType(c) == Character.OTHER_LETTER
+ || Character.getType(c) == Character.OTHER_NUMBER);
}
}
diff --git a/asterixdb/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/FuzzyJoinTest.java b/asterixdb/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/FuzzyJoinTest.java
index 40e4b8e..d2f5da5 100644
--- a/asterixdb/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/FuzzyJoinTest.java
+++ b/asterixdb/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/FuzzyJoinTest.java
@@ -55,7 +55,8 @@
BufferedWriter out = new BufferedWriter(new FileWriter(base + dataset.getPathPart0(Directory.SSJOINOUT)));
for (ResultSelfJoin result : results) {
- out.write(String.format("%d %d %.3f\n", rids.get(result.indexX), rids.get(result.indexY), result.similarity));
+ out.write(
+ String.format("%d %d %.3f\n", rids.get(result.indexX), rids.get(result.indexY), result.similarity));
}
out.close();
diff --git a/asterixdb/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/dataset/AbstractDataset.java b/asterixdb/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/dataset/AbstractDataset.java
index 1629c74..fe77a38 100644
--- a/asterixdb/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/dataset/AbstractDataset.java
+++ b/asterixdb/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/dataset/AbstractDataset.java
@@ -40,7 +40,8 @@
}
public static enum Relation {
- R, S,
+ R,
+ S,
}
public static final String FILE_PART = "part-";
diff --git a/asterixdb/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/dataset/DBLPDataset.java b/asterixdb/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/dataset/DBLPDataset.java
index 429a26e..15fada4 100644
--- a/asterixdb/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/dataset/DBLPDataset.java
+++ b/asterixdb/asterix-fuzzyjoin/src/test/java/org/apache/asterix/fuzzyjoin/tests/dataset/DBLPDataset.java
@@ -19,7 +19,6 @@
package org.apache.asterix.fuzzyjoin.tests.dataset;
-
public class DBLPDataset extends PublicationsDataset {
private static final String NAME = "dblp";
private static final int NO_RECORDS = 1268017;
diff --git a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/parser/FunctionParser.java b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/parser/FunctionParser.java
index 098b447..eb9631a 100644
--- a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/parser/FunctionParser.java
+++ b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/parser/FunctionParser.java
@@ -42,8 +42,8 @@
public FunctionDecl getFunctionDecl(Function function) throws CompilationException {
if (!function.getLanguage().equals(Function.LANGUAGE_AQL)) {
- throw new CompilationException(ErrorCode.COMPILATION_INCOMPATIBLE_FUNCTION_LANGUAGE,
- Function.LANGUAGE_AQL, function.getLanguage());
+ throw new CompilationException(ErrorCode.COMPILATION_INCOMPATIBLE_FUNCTION_LANGUAGE, Function.LANGUAGE_AQL,
+ function.getLanguage());
}
String functionBody = function.getFunctionBody();
List<String> params = function.getParams();
diff --git a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/rewrites/AqlQueryRewriter.java b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/rewrites/AqlQueryRewriter.java
index 6ab69ad..d2cb64f 100644
--- a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/rewrites/AqlQueryRewriter.java
+++ b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/rewrites/AqlQueryRewriter.java
@@ -123,8 +123,8 @@
declaredFunctions.addAll(storedFunctionDecls);
}
if (!declaredFunctions.isEmpty()) {
- AQLInlineUdfsVisitor visitor = new AQLInlineUdfsVisitor(context, new AQLRewriterFactory(),
- declaredFunctions, metadataProvider);
+ AQLInlineUdfsVisitor visitor =
+ new AQLInlineUdfsVisitor(context, new AQLRewriterFactory(), declaredFunctions, metadataProvider);
while (topStatement.accept(visitor, declaredFunctions)) {
// loop until no more changes
}
diff --git a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/util/AQLVariableSubstitutionUtil.java b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/util/AQLVariableSubstitutionUtil.java
index 096b12f..f19afac 100644
--- a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/util/AQLVariableSubstitutionUtil.java
+++ b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/util/AQLVariableSubstitutionUtil.java
@@ -32,8 +32,8 @@
public static ILangExpression substituteVariable(ILangExpression expression,
Map<VariableExpr, Expression> varExprMap) throws CompilationException {
- AQLCloneAndSubstituteVariablesVisitor visitor = new AQLCloneAndSubstituteVariablesVisitor(
- new LangRewritingContext(0));
+ AQLCloneAndSubstituteVariablesVisitor visitor =
+ new AQLCloneAndSubstituteVariablesVisitor(new LangRewritingContext(0));
VariableSubstitutionEnvironment env = new VariableSubstitutionEnvironment(varExprMap);
return expression.accept(visitor, env).first;
}
diff --git a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AQLCloneAndSubstituteVariablesVisitor.java b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AQLCloneAndSubstituteVariablesVisitor.java
index 0e7b4ad..6008723 100644
--- a/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AQLCloneAndSubstituteVariablesVisitor.java
+++ b/asterixdb/asterix-lang-aql/src/main/java/org/apache/asterix/lang/aql/visitor/AQLCloneAndSubstituteVariablesVisitor.java
@@ -93,8 +93,8 @@
@Override
public Pair<ILangExpression, VariableSubstitutionEnvironment> visit(DistinctClause dc,
VariableSubstitutionEnvironment env) throws CompilationException {
- List<Expression> exprList = VariableCloneAndSubstitutionUtil.visitAndCloneExprList(dc.getDistinctByExpr(), env,
- this);
+ List<Expression> exprList =
+ VariableCloneAndSubstitutionUtil.visitAndCloneExprList(dc.getDistinctByExpr(), env, this);
DistinctClause dc2 = new DistinctClause(exprList);
return new Pair<ILangExpression, VariableSubstitutionEnvironment>(dc2, env);
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/base/IQueryRewriter.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/base/IQueryRewriter.java
index f072917..05ab836 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/base/IQueryRewriter.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/base/IQueryRewriter.java
@@ -41,9 +41,8 @@
* @param context,
* manages ids of variables and guarantees uniqueness of variables.
*/
- void rewrite(List<FunctionDecl> declaredFunctions, IReturningStatement topExpr,
- MetadataProvider metadataProvider, LangRewritingContext context, boolean inlineUdfs)
- throws CompilationException;
+ void rewrite(List<FunctionDecl> declaredFunctions, IReturningStatement topExpr, MetadataProvider metadataProvider,
+ LangRewritingContext context, boolean inlineUdfs) throws CompilationException;
/**
* Find the function calls used by a given expression
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/context/Scope.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/context/Scope.java
index df6d3a5..bf46c5d 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/context/Scope.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/context/Scope.java
@@ -203,7 +203,7 @@
return symbols.keySet();
}
- public Scope getParentScope(){
+ public Scope getParentScope() {
return parent;
}
}
diff --git a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/Query.java b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/Query.java
index 8dc7b87..db74938 100644
--- a/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/Query.java
+++ b/asterixdb/asterix-lang-common/src/main/java/org/apache/asterix/lang/common/statement/Query.java
@@ -44,8 +44,7 @@
this(explain, topLevel, body, varCounter, null);
}
- public Query(boolean explain, boolean topLevel, Expression body, int varCounter,
- List<VarIdentifier> externalVars) {
+ public Query(boolean explain, boolean topLevel, Expression body, int varCounter, List<VarIdentifier> externalVars) {
this.explain = explain;
this.topLevel = topLevel;
this.body = body;
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppFunctionBodyRewriter.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppFunctionBodyRewriter.java
index cd57396..7858e58 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppFunctionBodyRewriter.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/SqlppFunctionBodyRewriter.java
@@ -30,8 +30,8 @@
@Override
public void rewrite(List<FunctionDecl> declaredFunctions, IReturningStatement topStatement,
- MetadataProvider metadataProvider,
- LangRewritingContext context, boolean inlineUdfs) throws CompilationException {
+ MetadataProvider metadataProvider, LangRewritingContext context, boolean inlineUdfs)
+ throws CompilationException {
// Sets up parameters.
setup(declaredFunctions, topStatement, metadataProvider, context);
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/InlineWithExpressionVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/InlineWithExpressionVisitor.java
index 54ef5ab..ebb7b10 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/InlineWithExpressionVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/InlineWithExpressionVisitor.java
@@ -66,8 +66,8 @@
}
// Inlines WITH expressions into the select expression.
- SelectExpression newSelectExpression = (SelectExpression) substituteExpression(selectExpression,
- varExprMap, context);
+ SelectExpression newSelectExpression =
+ (SelectExpression) substituteExpression(selectExpression, varExprMap, context);
// Continues to visit the rewritten select expression.
return super.visit(newSelectExpression, arg);
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/OperatorExpressionVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/OperatorExpressionVisitor.java
index 43e62d1..7e228f4 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/OperatorExpressionVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/OperatorExpressionVisitor.java
@@ -56,7 +56,7 @@
operatorExpr.setExprList(newExprList);
OperatorType opType = operatorExpr.getOpList().get(0);
switch (opType) {
- // There can only be one LIKE/NOT_LIKE/IN/NOT_IN in an operator expression (according to the grammar).
+ // There can only be one LIKE/NOT_LIKE/IN/NOT_IN in an operator expression (according to the grammar).
case LIKE:
case NOT_LIKE:
return processLikeOperator(operatorExpr, opType);
@@ -99,12 +99,14 @@
comparison.setCurrentop(true);
if (opType == OperatorType.IN) {
comparison.addOperator(OperatorType.EQ);
- return new QuantifiedExpression(Quantifier.SOME, new ArrayList<>(
- Collections.singletonList(new QuantifiedPair(bindingVar, collectionExpr))), comparison);
+ return new QuantifiedExpression(Quantifier.SOME,
+ new ArrayList<>(Collections.singletonList(new QuantifiedPair(bindingVar, collectionExpr))),
+ comparison);
} else {
comparison.addOperator(OperatorType.NEQ);
- return new QuantifiedExpression(Quantifier.EVERY, new ArrayList<>(
- Collections.singletonList(new QuantifiedPair(bindingVar, collectionExpr))), comparison);
+ return new QuantifiedExpression(Quantifier.EVERY,
+ new ArrayList<>(Collections.singletonList(new QuantifiedPair(bindingVar, collectionExpr))),
+ comparison);
}
}
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupByAggregationSugarVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupByAggregationSugarVisitor.java
index 6f420b4..c5f0a54 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupByAggregationSugarVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppGroupByAggregationSugarVisitor.java
@@ -282,9 +282,8 @@
new FieldAccessor(new FieldAccessor(fromBindingVar, fieldVars.values().iterator().next()),
SqlppVariableUtil.toUserDefinedVariableName(usedVar.getVar())));
} else {
- throw new CompilationException(
- "Cannot resolve alias reference for undefined identifier " + usedVar.getVar().getValue()
- + " in " + fieldVars);
+ throw new CompilationException("Cannot resolve alias reference for undefined identifier "
+ + usedVar.getVar().getValue() + " in " + fieldVars);
}
}
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppInlineUdfsVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppInlineUdfsVisitor.java
index 3bd29ef..d1d95ac 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppInlineUdfsVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SqlppInlineUdfsVisitor.java
@@ -248,8 +248,7 @@
Map<Expression, Expression> varExprMap = new HashMap<>();
for (LetClause lc : letClauses) {
// inline let variables one by one iteratively.
- lc.setBindingExpr(SqlppRewriteUtil.substituteExpression(lc.getBindingExpr(),
- varExprMap, context));
+ lc.setBindingExpr(SqlppRewriteUtil.substituteExpression(lc.getBindingExpr(), varExprMap, context));
varExprMap.put(lc.getVarExpr(), lc.getBindingExpr());
}
return varExprMap;
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SubstituteGroupbyExpressionWithVariableVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SubstituteGroupbyExpressionWithVariableVisitor.java
index 6d4816b..f157f4f 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SubstituteGroupbyExpressionWithVariableVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/SubstituteGroupbyExpressionWithVariableVisitor.java
@@ -60,8 +60,8 @@
SubstituteGroupbyExpressionVisitor visitor = new SubstituteGroupbyExpressionVisitor(context, map);
// Rewrites LET/HAVING/SELECT clauses.
- if(selectBlock.hasLetClausesAfterGroupby()){
- for(LetClause letClause : selectBlock.getLetListAfterGroupby()){
+ if (selectBlock.hasLetClausesAfterGroupby()) {
+ for (LetClause letClause : selectBlock.getLetListAfterGroupby()) {
letClause.accept(this, arg);
}
}
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
index c28029a..04f8bc6 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/rewrites/visitor/VariableCheckAndRewriteVisitor.java
@@ -118,9 +118,8 @@
SqlppVariableUtil.toUserDefinedVariableName(varName).getValue());
default:
// More than one possibilities.
- throw new CompilationException(
- "Cannot resolve ambiguous alias reference for undefined identifier " + SqlppVariableUtil
- .toUserDefinedVariableName(varName).getValue() + " in " + localVars);
+ throw new CompilationException("Cannot resolve ambiguous alias reference for undefined identifier "
+ + SqlppVariableUtil.toUserDefinedVariableName(varName).getValue() + " in " + localVars);
}
}
@@ -159,10 +158,9 @@
+ " because there is no dataverse declared, nor an alias with name " + datasetName + "!");
}
//If no available dataset nor in-scope variable to resolve to, we throw an error.
- throw new CompilationException(
- "Cannot find dataset " + datasetName + " in dataverse " + (dataverseName == null ?
- defaultDataverseName :
- dataverseName) + " nor an alias with name " + datasetName + "!");
+ throw new CompilationException("Cannot find dataset " + datasetName + " in dataverse "
+ + (dataverseName == null ? defaultDataverseName : dataverseName) + " nor an alias with name "
+ + datasetName + "!");
}
// For a From/Join/UNNEST/Quantifiers binding expression, we resolve the undefined identifier reference as
@@ -175,8 +173,8 @@
private boolean datasetExists(String dataverseName, String datasetName) throws CompilationException {
try {
- return metadataProvider.findDataset(dataverseName, datasetName) != null || fullyQualifiedDatasetNameExists(
- datasetName);
+ return metadataProvider.findDataset(dataverseName, datasetName) != null
+ || fullyQualifiedDatasetNameExists(datasetName);
} catch (AlgebricksException e) {
throw new CompilationException(e);
}
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java
index 06bb1b9..1ff7c9e 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java
@@ -143,8 +143,8 @@
return callExpr;
}
callExpr.setFunctionSignature(new FunctionSignature(FunctionConstants.ASTERIX_NS, internalFuncName, 1));
- callExpr.setExprList(new ArrayList<>(Collections.singletonList(new ListConstructor(
- ListConstructor.Type.ORDERED_LIST_CONSTRUCTOR, callExpr.getExprList()))));
+ callExpr.setExprList(new ArrayList<>(Collections.singletonList(
+ new ListConstructor(ListConstructor.Type.ORDERED_LIST_CONSTRUCTOR, callExpr.getExprList()))));
return callExpr;
}
diff --git a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppExpressionScopingVisitor.java b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppExpressionScopingVisitor.java
index 737cc53..bcb131b 100644
--- a/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppExpressionScopingVisitor.java
+++ b/asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/visitor/base/AbstractSqlppExpressionScopingVisitor.java
@@ -274,7 +274,7 @@
public Expression visit(LimitClause limitClause, ILangExpression arg) throws CompilationException {
scopeChecker.pushForbiddenScope(scopeChecker.getCurrentScope());
limitClause.setLimitExpr(visit(limitClause.getLimitExpr(), limitClause));
- if(limitClause.hasOffset()) {
+ if (limitClause.hasOffset()) {
limitClause.setOffset(visit(limitClause.getOffset(), limitClause));
}
scopeChecker.popForbiddenScope();
@@ -375,8 +375,8 @@
// Adds a new encountered alias identifier into a scope
private void addNewVarSymbolToScope(Scope scope, VarIdentifier var) throws CompilationException {
if (scope.findLocalSymbol(var.getValue()) != null) {
- throw new CompilationException("Duplicate alias definitions: "
- + SqlppVariableUtil.toUserDefinedName(var.getValue()));
+ throw new CompilationException(
+ "Duplicate alias definitions: " + SqlppVariableUtil.toUserDefinedName(var.getValue()));
}
scope.addNewVarSymbolToScope(var);
}
@@ -387,8 +387,7 @@
for (String symbolToBeMerged : symbolsToBeMerged) {
if (hostScope.findLocalSymbol(symbolToBeMerged) != null) {
throw new CompilationException(
- "Duplicate alias definitions: "
- + SqlppVariableUtil.toUserDefinedName(symbolToBeMerged));
+ "Duplicate alias definitions: " + SqlppVariableUtil.toUserDefinedName(symbolToBeMerged));
}
}
hostScope.merge(scopeToBeMerged);
diff --git a/asterixdb/asterix-maven-plugins/asterix-evaluator-generator-maven-plugin/src/main/java/org/apache/asterix/runtime/evaluators/staticcodegen/EvaluatorMissingCheckVisitor.java b/asterixdb/asterix-maven-plugins/asterix-evaluator-generator-maven-plugin/src/main/java/org/apache/asterix/runtime/evaluators/staticcodegen/EvaluatorMissingCheckVisitor.java
index adedee5..9339e8b 100644
--- a/asterixdb/asterix-maven-plugins/asterix-evaluator-generator-maven-plugin/src/main/java/org/apache/asterix/runtime/evaluators/staticcodegen/EvaluatorMissingCheckVisitor.java
+++ b/asterixdb/asterix-maven-plugins/asterix-evaluator-generator-maven-plugin/src/main/java/org/apache/asterix/runtime/evaluators/staticcodegen/EvaluatorMissingCheckVisitor.java
@@ -44,8 +44,8 @@
private static final String TYPE_CHECKER_DESC = "L" + TYPE_CHECKER_CLASS + ";";
private static final String TYPE_CHECKER_NAME = "typeChecker";
private static final String IS_MISSING = "isMissing";
- private static final String TYPECHECK_METHOD_DESC = "(Lorg/apache/hyracks/data/std/api/IPointable;"
- + "Lorg/apache/hyracks/data/std/api/IPointable;)Z";
+ private static final String TYPECHECK_METHOD_DESC =
+ "(Lorg/apache/hyracks/data/std/api/IPointable;" + "Lorg/apache/hyracks/data/std/api/IPointable;)Z";
private static final String CONSTRUCTOR = "<init>";
private String className = null;
private Label lastAddedLabel = null;
diff --git a/asterixdb/asterix-maven-plugins/asterix-evaluator-generator-maven-plugin/src/main/java/org/apache/asterix/runtime/evaluators/staticcodegen/EvaluatorNullCheckVisitor.java b/asterixdb/asterix-maven-plugins/asterix-evaluator-generator-maven-plugin/src/main/java/org/apache/asterix/runtime/evaluators/staticcodegen/EvaluatorNullCheckVisitor.java
index 30f810b..84bc320 100644
--- a/asterixdb/asterix-maven-plugins/asterix-evaluator-generator-maven-plugin/src/main/java/org/apache/asterix/runtime/evaluators/staticcodegen/EvaluatorNullCheckVisitor.java
+++ b/asterixdb/asterix-maven-plugins/asterix-evaluator-generator-maven-plugin/src/main/java/org/apache/asterix/runtime/evaluators/staticcodegen/EvaluatorNullCheckVisitor.java
@@ -31,8 +31,8 @@
+ "IFrameTupleReference;Lorg/apache/hyracks/data/std/api/IPointable;)V";
private final static String EVALUATE = "evaluate";
private final static MethodIdentifier METHOD_IDENTIFIER = new MethodIdentifier(EVALUATE, EVALUATE_DESC, null);
- private final static String TYPE_CHECKER_CLASS = "org/apache/asterix/runtime/evaluators/staticcodegen/"
- + "TypeChecker";
+ private final static String TYPE_CHECKER_CLASS =
+ "org/apache/asterix/runtime/evaluators/staticcodegen/" + "TypeChecker";
private final static String TYPE_CHECKER_DESC = "L" + TYPE_CHECKER_CLASS + ";";
private final static String TYPE_CHECKER_NAME = "typeChecker";
private final static String IS_NULL = "isNull";
diff --git a/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/src/main/java/org/apache/asterix/extension/grammar/GrammarExtensionMojo.java b/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/src/main/java/org/apache/asterix/extension/grammar/GrammarExtensionMojo.java
index 0abca8a..52b2be5 100644
--- a/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/src/main/java/org/apache/asterix/extension/grammar/GrammarExtensionMojo.java
+++ b/asterixdb/asterix-maven-plugins/asterix-grammar-extension-maven-plugin/src/main/java/org/apache/asterix/extension/grammar/GrammarExtensionMojo.java
@@ -61,8 +61,8 @@
private static final char OPEN_PAREN = '(';
private static final char CLOSE_PAREN = ')';
private static final char SEMICOLON = ';';
- private static final List<Character> SIG_SPECIAL_CHARS = Arrays
- .asList(new Character[] { '(', ')', ':', '<', '>', ';', '.' });
+ private static final List<Character> SIG_SPECIAL_CHARS =
+ Arrays.asList(new Character[] { '(', ')', ':', '<', '>', ';', '.' });
private static final String KWCLASS = "class";
private static final String KWIMPORT = "import";
private static final String KWUNIMPORT = "unimport";
@@ -80,8 +80,8 @@
private static final String WITH = "with";
private static final String OPTION_TRUE = "true";
private static final String OPTION_FALSE = "false";
- private static final List<String> KEYWORDS = Arrays
- .asList(new String[] { KWCLASS, KWIMPORT, KWPACKAGE, PARSER_BEGIN, PARSER_END });
+ private static final List<String> KEYWORDS =
+ Arrays.asList(new String[] { KWCLASS, KWIMPORT, KWPACKAGE, PARSER_BEGIN, PARSER_END });
private static final List<String> EXTENSIONKEYWORDS =
Arrays.asList(new String[] { KWIMPORT, KWUNIMPORT, NEWPRODUCTION, NEW_AT_THE_END_PRODUCTION,
NEW_AT_THE_END_CLASS_DEFINITION, OVERRIDEPRODUCTION, MERGEPRODUCTION });
@@ -974,8 +974,8 @@
int after = block.indexOf(AFTER);
if (before >= 0) {
// before exists
- amendments[beforeIndex] = block.substring(before + BEFORE.length(),
- (after >= 0) ? after : block.length() - 1);
+ amendments[beforeIndex] =
+ block.substring(before + BEFORE.length(), (after >= 0) ? after : block.length() - 1);
if (amendments[beforeIndex].trim().length() == 0) {
amendments[beforeIndex] = null;
}
diff --git a/asterixdb/asterix-maven-plugins/asterix-test-datagenerator-maven-plugin/src/main/java/org/apache/hyracks/maven/plugin/TestDataGeneratorMojo.java b/asterixdb/asterix-maven-plugins/asterix-test-datagenerator-maven-plugin/src/main/java/org/apache/hyracks/maven/plugin/TestDataGeneratorMojo.java
index b56ab47..dc56a29 100644
--- a/asterixdb/asterix-maven-plugins/asterix-test-datagenerator-maven-plugin/src/main/java/org/apache/hyracks/maven/plugin/TestDataGeneratorMojo.java
+++ b/asterixdb/asterix-maven-plugins/asterix-test-datagenerator-maven-plugin/src/main/java/org/apache/hyracks/maven/plugin/TestDataGeneratorMojo.java
@@ -55,7 +55,6 @@
*/
File outputDir;
-
/**
* @parameter default-value="${maven.test.skip}"
*/
diff --git a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerGenerator.java b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerGenerator.java
index 3ae7339..83678a5 100644
--- a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerGenerator.java
+++ b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/main/java/org/apache/asterix/lexergenerator/LexerGenerator.java
@@ -130,8 +130,7 @@
StringBuilder result = new StringBuilder();
Set<String> functions = main.neededAuxFunctions();
for (String token : functions) {
- result.append("private int parse_" + token
- + "(char currentChar) throws IOException {\n");
+ result.append("private int parse_" + token + "(char currentChar) throws IOException {\n");
result.append(tokens.get(token).getNode().toJavaAuxFunction());
result.append("\n}\n\n");
}
diff --git a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/Fixtures.java b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/Fixtures.java
index a6d0267..b7aa5e9 100644
--- a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/Fixtures.java
+++ b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/Fixtures.java
@@ -22,28 +22,28 @@
import org.apache.asterix.lexergenerator.rules.RuleChar;
public class Fixtures {
- static String token_name = "MYTOKEN";
- static String token2_name = "MYTOKEN2";
- static String token_return = "return TOKEN_MYTOKEN;\n";
- static String token2_return = "return TOKEN_MYTOKEN2;\n";
- static String token_parseerror = "return parseError(TOKEN_MYTOKEN);\n";
- static String token_tostring = "! ";
- static String rule_action = "myaction";
- static String rule_name = "myrule";
- static String rule_match = "matchCheck("+rule_name+")";
- static String rule2_action = "myaction2";
- static String rule2_name = "myrule2";
- static String rule2_match = "matchCheck2("+rule_name+")";
+ static String token_name = "MYTOKEN";
+ static String token2_name = "MYTOKEN2";
+ static String token_return = "return TOKEN_MYTOKEN;\n";
+ static String token2_return = "return TOKEN_MYTOKEN2;\n";
+ static String token_parseerror = "return parseError(TOKEN_MYTOKEN);\n";
+ static String token_tostring = "! ";
+ static String rule_action = "myaction";
+ static String rule_name = "myrule";
+ static String rule_match = "matchCheck(" + rule_name + ")";
+ static String rule2_action = "myaction2";
+ static String rule2_name = "myrule2";
+ static String rule2_match = "matchCheck2(" + rule_name + ")";
- static public Rule createRule(final String name){
- return new Rule(){
- String rule_name = name;
- String rule_action = "myaction";
- String rule_match = "matchCheck("+rule_name+")";
+ static public Rule createRule(final String name) {
+ return new Rule() {
+ String rule_name = name;
+ String rule_action = "myaction";
+ String rule_match = "matchCheck(" + rule_name + ")";
@Override
- public Rule clone(){
- return Fixtures.createRule(name+"_clone");
+ public Rule clone() {
+ return Fixtures.createRule(name + "_clone");
}
@Override
@@ -53,20 +53,20 @@
@Override
public String javaMatch(String action) {
- return rule_match+"{"+action+"}";
+ return rule_match + "{" + action + "}";
}
@Override
- public String toString(){
+ public String toString() {
return rule_name;
}
};
}
- static Rule rule = new Rule(){
+ static Rule rule = new Rule() {
- public Rule clone(){
+ public Rule clone() {
return null;
}
@@ -77,19 +77,19 @@
@Override
public String javaMatch(String action) {
- return rule_match+"{"+action+"}";
+ return rule_match + "{" + action + "}";
}
@Override
- public String toString(){
+ public String toString() {
return rule_name;
}
};
- static Rule rule2 = new Rule(){
+ static Rule rule2 = new Rule() {
- public Rule clone(){
+ public Rule clone() {
return null;
}
@@ -100,11 +100,11 @@
@Override
public String javaMatch(String act) {
- return rule2_match+"{"+act+"}";
+ return rule2_match + "{" + act + "}";
}
@Override
- public String toString(){
+ public String toString() {
return rule2_name;
}
diff --git a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAddRuleTest.java b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAddRuleTest.java
index dee1255..da2241c 100644
--- a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAddRuleTest.java
+++ b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAddRuleTest.java
@@ -33,16 +33,10 @@
node.append(rule);
node.add(rule2);
node.appendTokenName(token_name);
- assertEquals(" ( " + rule_name +token_tostring + " || " + rule2_name + token_tostring + " ) ", node.toString());
- assertEquals(rule_match+"{"
- +"\n" + rule_action
- +"\n" +token_return
- +"}"
- +rule2_match+"{"
- +"\n"+rule2_action
- +"\n"+token_return
- +"}"
- +token_parseerror , node.toJava());
+ assertEquals(" ( " + rule_name + token_tostring + " || " + rule2_name + token_tostring + " ) ",
+ node.toString());
+ assertEquals(rule_match + "{" + "\n" + rule_action + "\n" + token_return + "}" + rule2_match + "{" + "\n"
+ + rule2_action + "\n" + token_return + "}" + token_parseerror, node.toJava());
}
@Test
@@ -52,18 +46,11 @@
node.add(ruleB);
node.add(ruleC);
node.appendTokenName(token_name);
- assertEquals(" ( a" + token_tostring + " || b" + token_tostring + " || c" + token_tostring + " ) ", node.toString());
- assertEquals("switch(currentChar){\n" +
- "case 'a':" +
- "\n" + ruleABC_action +
- "\n" + token_return +
- "case 'b':" +
- "\n" + ruleABC_action +
- "\n" + token_return +
- "case 'c':" +
- "\n" + ruleABC_action +
- "\n" + token_return +
- "}\n"+ token_parseerror , node.toJava());
+ assertEquals(" ( a" + token_tostring + " || b" + token_tostring + " || c" + token_tostring + " ) ",
+ node.toString());
+ assertEquals("switch(currentChar){\n" + "case 'a':" + "\n" + ruleABC_action + "\n" + token_return + "case 'b':"
+ + "\n" + ruleABC_action + "\n" + token_return + "case 'c':" + "\n" + ruleABC_action + "\n"
+ + token_return + "}\n" + token_parseerror, node.toJava());
}
}
diff --git a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAppendNodeTest.java b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAppendNodeTest.java
index 206e69b..7838f5b 100644
--- a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAppendNodeTest.java
+++ b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAppendNodeTest.java
@@ -23,7 +23,6 @@
import org.junit.Test;
-
import org.apache.asterix.lexergenerator.LexerNode;
import org.apache.asterix.lexergenerator.rules.RuleEpsilon;
@@ -94,6 +93,6 @@
node.append(node2);
// TODO
// assertEquals(" ( A_clone! A_clone! || A_clone! ) ", node.toString());
- }
+ }
}
diff --git a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAppendRuleTest.java b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAppendRuleTest.java
index e532cf8..28c5e31 100644
--- a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAppendRuleTest.java
+++ b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAppendRuleTest.java
@@ -31,7 +31,7 @@
LexerNode node = new LexerNode();
node.appendTokenName(token_name);
assertEquals(token_tostring, node.toString());
- assertEquals(token_return, node.toJava());
+ assertEquals(token_return, node.toJava());
}
@Test
@@ -39,11 +39,9 @@
LexerNode node = new LexerNode();
node.append(rule);
node.appendTokenName(token_name);
- assertEquals(rule_name+token_tostring, node.toString());
- assertEquals(rule_match+"{"
- +"\n"+rule_action
- +"\n"+token_return
- +"}"+token_parseerror, node.toJava());
+ assertEquals(rule_name + token_tostring, node.toString());
+ assertEquals(rule_match + "{" + "\n" + rule_action + "\n" + token_return + "}" + token_parseerror,
+ node.toJava());
}
@Test
@@ -52,14 +50,8 @@
node.append(rule);
node.append(rule2);
node.appendTokenName(token_name);
- assertEquals(rule_name+rule2_name+token_tostring, node.toString());
- assertEquals(rule_match+"{"
- +"\n"+rule_action
- +"\n"+rule2_match+"{"
- +"\n"+rule2_action
- +"\n"+token_return
- +"}"
- +token_parseerror
- +"}"+token_parseerror, node.toJava());
+ assertEquals(rule_name + rule2_name + token_tostring, node.toString());
+ assertEquals(rule_match + "{" + "\n" + rule_action + "\n" + rule2_match + "{" + "\n" + rule2_action + "\n"
+ + token_return + "}" + token_parseerror + "}" + token_parseerror, node.toJava());
}
}
diff --git a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAuxFunctionsTest.java b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAuxFunctionsTest.java
index 677d12e..7fd4aa7 100644
--- a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAuxFunctionsTest.java
+++ b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeAuxFunctionsTest.java
@@ -27,7 +27,6 @@
import org.junit.Test;
-
import org.apache.asterix.lexergenerator.LexerNode;
import org.apache.asterix.lexergenerator.Token;
import org.apache.asterix.lexergenerator.rules.RuleEpsilon;
@@ -42,16 +41,10 @@
node.append(rule);
node.add(rule2);
node.appendTokenName(token_name);
- assertEquals(" ( " + rule_name +token_tostring + " || " + rule2_name + token_tostring + " ) ", node.toString());
- assertEquals(rule_match+"{"
- +"\n" + rule_action
- +"\n" +token_return
- +"}"
- +rule2_match+"{"
- +"\n"+rule2_action
- +"\n"+token_return
- +"}"
- +expectedDifferentReturn , node.toJavaAuxFunction());
+ assertEquals(" ( " + rule_name + token_tostring + " || " + rule2_name + token_tostring + " ) ",
+ node.toString());
+ assertEquals(rule_match + "{" + "\n" + rule_action + "\n" + token_return + "}" + rule2_match + "{" + "\n"
+ + rule2_action + "\n" + token_return + "}" + expectedDifferentReturn, node.toJavaAuxFunction());
}
@Test
@@ -61,18 +54,11 @@
node.add(ruleB);
node.add(ruleC);
node.appendTokenName(token_name);
- assertEquals(" ( a" + token_tostring + " || b" + token_tostring + " || c" + token_tostring + " ) ", node.toString());
- assertEquals("switch(currentChar){\n" +
- "case 'a':" +
- "\n" + ruleABC_action +
- "\n" + token_return +
- "case 'b':" +
- "\n" + ruleABC_action +
- "\n" + token_return +
- "case 'c':" +
- "\n" + ruleABC_action +
- "\n" + token_return +
- "}\n"+ expectedDifferentReturn , node.toJavaAuxFunction());
+ assertEquals(" ( a" + token_tostring + " || b" + token_tostring + " || c" + token_tostring + " ) ",
+ node.toString());
+ assertEquals("switch(currentChar){\n" + "case 'a':" + "\n" + ruleABC_action + "\n" + token_return + "case 'b':"
+ + "\n" + ruleABC_action + "\n" + token_return + "case 'c':" + "\n" + ruleABC_action + "\n"
+ + token_return + "}\n" + expectedDifferentReturn, node.toJavaAuxFunction());
}
@Test
@@ -90,7 +76,7 @@
assertEquals(expectedNeededAuxFunctions, node.neededAuxFunctions());
}
- @Test(expected=Exception.class)
+ @Test(expected = Exception.class)
public void NodeExpandFirstActionError() throws Exception {
LexerNode node = new LexerNode();
node.append(ruleA);
@@ -104,7 +90,8 @@
try {
node.expandFirstAction(tokens);
} catch (Exception e) {
- assertEquals("Cannot find a token used as part of another definition, missing token: token1", e.getMessage());
+ assertEquals("Cannot find a token used as part of another definition, missing token: token1",
+ e.getMessage());
throw e;
}
}
diff --git a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeMergeNodeTest.java b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeMergeNodeTest.java
index caee9a5..a9f6012 100644
--- a/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeMergeNodeTest.java
+++ b/asterixdb/asterix-maven-plugins/lexer-generator-maven-plugin/src/test/java/org/apache/asterix/lexergenerator/LexerNodeMergeNodeTest.java
@@ -67,9 +67,11 @@
node2.appendTokenName(token2_name);
node.merge(node2);
- assertEquals(" ( " + rule_name + token_tostring + " || " + rule2_name + token_tostring + " ) ", node.toString());
- assertEquals(rule_match + "{" + "\n" + rule_action + "\n" + token_return + "}" + rule2_match + "{" + "\n"
- + rule2_action + "\n" + token2_return + "}return parseError(TOKEN_MYTOKEN,TOKEN_MYTOKEN2);\n",
+ assertEquals(" ( " + rule_name + token_tostring + " || " + rule2_name + token_tostring + " ) ",
+ node.toString());
+ assertEquals(
+ rule_match + "{" + "\n" + rule_action + "\n" + token_return + "}" + rule2_match + "{" + "\n"
+ + rule2_action + "\n" + token2_return + "}return parseError(TOKEN_MYTOKEN,TOKEN_MYTOKEN2);\n",
node.toJava());
}
diff --git a/asterixdb/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/org/apache/asterix/recordmanagergenerator/Generator.java b/asterixdb/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/org/apache/asterix/recordmanagergenerator/Generator.java
index ca53f66..805e194 100644
--- a/asterixdb/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/org/apache/asterix/recordmanagergenerator/Generator.java
+++ b/asterixdb/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/org/apache/asterix/recordmanagergenerator/Generator.java
@@ -34,13 +34,8 @@
SUPPORT
}
- public static void generateSource(
- TemplateType tmplType,
- String packageName,
- RecordType rec,
- InputStream is,
- StringBuilder sb,
- boolean debug) {
+ public static void generateSource(TemplateType tmplType, String packageName, RecordType rec, InputStream is,
+ StringBuilder sb, boolean debug) {
try {
BufferedReader in = new BufferedReader(new InputStreamReader(is));
@@ -63,16 +58,12 @@
}
- private static void generateMemoryManagerSource(
- String packageName,
- RecordType resource,
- BufferedReader in,
- StringBuilder sb,
- boolean debug) throws IOException {
+ private static void generateMemoryManagerSource(String packageName, RecordType resource, BufferedReader in,
+ StringBuilder sb, boolean debug) throws IOException {
String line = null;
String indent = " ";
- while((line = in.readLine()) != null) {
+ while ((line = in.readLine()) != null) {
if (line.contains("@PACKAGE@")) {
line = line.replace("@PACKAGE@", packageName);
}
@@ -114,16 +105,12 @@
}
}
- private static void generateArenaManagerSource(
- String packageName,
- RecordType resource,
- BufferedReader in,
- StringBuilder sb,
- boolean debug) throws IOException {
+ private static void generateArenaManagerSource(String packageName, RecordType resource, BufferedReader in,
+ StringBuilder sb, boolean debug) throws IOException {
String line = null;
String indent = " ";
- while((line = in.readLine()) != null) {
+ while ((line = in.readLine()) != null) {
if (line.contains("@PACKAGE@")) {
line = line.replace("@PACKAGE@", packageName);
}
@@ -152,13 +139,10 @@
}
}
- private static void generateSupportFileSource(
- String packageName,
- BufferedReader in,
- StringBuilder sb,
+ private static void generateSupportFileSource(String packageName, BufferedReader in, StringBuilder sb,
boolean debug) throws IOException {
String line = null;
- while((line = in.readLine()) != null) {
+ while ((line = in.readLine()) != null) {
if (line.contains("@PACKAGE@")) {
line = line.replace("@PACKAGE@", packageName);
}
diff --git a/asterixdb/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/org/apache/asterix/recordmanagergenerator/RecordManagerGeneratorMojo.java b/asterixdb/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/org/apache/asterix/recordmanagergenerator/RecordManagerGeneratorMojo.java
index 909edc3..e5f50d3 100644
--- a/asterixdb/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/org/apache/asterix/recordmanagergenerator/RecordManagerGeneratorMojo.java
+++ b/asterixdb/asterix-maven-plugins/record-manager-generator-maven-plugin/src/main/java/org/apache/asterix/recordmanagergenerator/RecordManagerGeneratorMojo.java
@@ -68,7 +68,6 @@
*/
MavenProject project;
-
String recordManagerTemplate = "RecordManager.java";
String arenaManagerTemplate = "ArenaManager.java";
String[] supportTemplates = { "RecordManagerStats.java", "AllocInfo.java", "TypeUtil.java" };
@@ -101,10 +100,8 @@
}
public void execute() throws MojoExecutionException, MojoFailureException {
- String outputPath = project.getBuild().getDirectory() + File.separator
- + "generated-sources" + File.separator
- + "java" + File.separator
- + packageName.replace('.', File.separatorChar);
+ String outputPath = project.getBuild().getDirectory() + File.separator + "generated-sources" + File.separator
+ + "java" + File.separator + packageName.replace('.', File.separatorChar);
File dir = new File(outputPath);
if (!dir.exists()) {
dir.mkdirs();
@@ -122,7 +119,8 @@
}
}
- private void generateSource(Generator.TemplateType mgrType, String template, String recordType, String outputPath) throws MojoFailureException {
+ private void generateSource(Generator.TemplateType mgrType, String template, String recordType, String outputPath)
+ throws MojoFailureException {
InputStream is = getClass().getClassLoader().getResourceAsStream(template);
if (is == null) {
throw new MojoFailureException("template '" + template + "' not found in classpath");
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
index 3205cb6..9753bcf 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -110,14 +110,14 @@
private static String metadataNodeName;
private static List<String> nodeNames;
private static boolean isNewUniverse;
- private static final IMetadataIndex[] PRIMARY_INDEXES = new IMetadataIndex[] {
- MetadataPrimaryIndexes.DATAVERSE_DATASET, MetadataPrimaryIndexes.DATASET_DATASET,
- MetadataPrimaryIndexes.DATATYPE_DATASET, MetadataPrimaryIndexes.INDEX_DATASET,
- MetadataPrimaryIndexes.NODE_DATASET, MetadataPrimaryIndexes.NODEGROUP_DATASET,
- MetadataPrimaryIndexes.FUNCTION_DATASET, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET,
- MetadataPrimaryIndexes.FEED_DATASET, MetadataPrimaryIndexes.FEED_POLICY_DATASET,
- MetadataPrimaryIndexes.LIBRARY_DATASET, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET,
- MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET };
+ private static final IMetadataIndex[] PRIMARY_INDEXES =
+ new IMetadataIndex[] { MetadataPrimaryIndexes.DATAVERSE_DATASET, MetadataPrimaryIndexes.DATASET_DATASET,
+ MetadataPrimaryIndexes.DATATYPE_DATASET, MetadataPrimaryIndexes.INDEX_DATASET,
+ MetadataPrimaryIndexes.NODE_DATASET, MetadataPrimaryIndexes.NODEGROUP_DATASET,
+ MetadataPrimaryIndexes.FUNCTION_DATASET, MetadataPrimaryIndexes.DATASOURCE_ADAPTER_DATASET,
+ MetadataPrimaryIndexes.FEED_DATASET, MetadataPrimaryIndexes.FEED_POLICY_DATASET,
+ MetadataPrimaryIndexes.LIBRARY_DATASET, MetadataPrimaryIndexes.COMPACTION_POLICY_DATASET,
+ MetadataPrimaryIndexes.EXTERNAL_FILE_DATASET, MetadataPrimaryIndexes.FEED_CONNECTION_DATASET };
private MetadataBootstrap() {
}
@@ -267,9 +267,9 @@
private static void insertInitialCompactionPolicies(MetadataTransactionContext mdTxnCtx)
throws AlgebricksException {
- String[] builtInCompactionPolicyClassNames = new String[] { ConstantMergePolicyFactory.class.getName(),
- PrefixMergePolicyFactory.class.getName(), NoMergePolicyFactory.class.getName(),
- CorrelatedPrefixMergePolicyFactory.class.getName() };
+ String[] builtInCompactionPolicyClassNames =
+ new String[] { ConstantMergePolicyFactory.class.getName(), PrefixMergePolicyFactory.class.getName(),
+ NoMergePolicyFactory.class.getName(), CorrelatedPrefixMergePolicyFactory.class.getName() };
for (String policyClassName : builtInCompactionPolicyClassNames) {
CompactionPolicy compactionPolicy = getCompactionPolicyEntity(policyClassName);
MetadataManager.INSTANCE.addCompactionPolicy(mdTxnCtx, compactionPolicy);
@@ -289,8 +289,8 @@
private static CompactionPolicy getCompactionPolicyEntity(String compactionPolicyClassName)
throws AlgebricksException {
try {
- String policyName = ((ILSMMergePolicyFactory) (Class.forName(compactionPolicyClassName).newInstance()))
- .getName();
+ String policyName =
+ ((ILSMMergePolicyFactory) (Class.forName(compactionPolicyClassName).newInstance())).getName();
return new CompactionPolicy(MetadataConstants.METADATA_DATAVERSE_NAME, policyName,
compactionPolicyClassName);
} catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
@@ -327,13 +327,13 @@
// We are unable to do this since IStorageManager needs a dataset to determine
// the appropriate
// objects
- ILSMOperationTrackerFactory opTrackerFactory = index.isPrimaryIndex()
- ? new PrimaryIndexOperationTrackerFactory(datasetId)
- : new SecondaryIndexOperationTrackerFactory(datasetId);
- ILSMComponentIdGeneratorFactory idGeneratorProvider = new DatasetLSMComponentIdGeneratorFactory(
- index.getDatasetId().getId());
- ILSMIOOperationCallbackFactory ioOpCallbackFactory = new LSMBTreeIOOperationCallbackFactory(
- idGeneratorProvider);
+ ILSMOperationTrackerFactory opTrackerFactory =
+ index.isPrimaryIndex() ? new PrimaryIndexOperationTrackerFactory(datasetId)
+ : new SecondaryIndexOperationTrackerFactory(datasetId);
+ ILSMComponentIdGeneratorFactory idGeneratorProvider =
+ new DatasetLSMComponentIdGeneratorFactory(index.getDatasetId().getId());
+ ILSMIOOperationCallbackFactory ioOpCallbackFactory =
+ new LSMBTreeIOOperationCallbackFactory(idGeneratorProvider);
IStorageComponentProvider storageComponentProvider = appContext.getStorageComponentProvider();
if (isNewUniverse()) {
LSMBTreeLocalResourceFactory lsmBtreeFactory = new LSMBTreeLocalResourceFactory(
@@ -343,8 +343,8 @@
storageComponentProvider.getIoOperationSchedulerProvider(),
appContext.getMetadataMergePolicyFactory(), GlobalConfig.DEFAULT_COMPACTION_POLICY_PROPERTIES, true,
bloomFilterKeyFields, appContext.getBloomFilterFalsePositiveRate(), true, null);
- DatasetLocalResourceFactory dsLocalResourceFactory = new DatasetLocalResourceFactory(datasetId,
- lsmBtreeFactory);
+ DatasetLocalResourceFactory dsLocalResourceFactory =
+ new DatasetLocalResourceFactory(datasetId, lsmBtreeFactory);
// TODO(amoudi) Creating the index should be done through the same code path as
// other indexes
// This is to be done by having a metadata dataset associated with each index
@@ -364,8 +364,8 @@
if (index.getResourceId() != resource.getId()) {
throw new HyracksDataException("Resource Id doesn't match expected metadata index resource id");
}
- IndexDataflowHelper indexHelper = new IndexDataflowHelper(ncServiceCtx,
- storageComponentProvider.getStorageManager(), file);
+ IndexDataflowHelper indexHelper =
+ new IndexDataflowHelper(ncServiceCtx, storageComponentProvider.getStorageManager(), file);
indexHelper.open(); // Opening the index through the helper will ensure it gets instantiated
indexHelper.close();
}
@@ -420,8 +420,8 @@
LOGGER.info("Dropped a pending dataverse: " + dataverse.getDataverseName());
}
} else {
- List<Dataset> datasets = MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx,
- dataverse.getDataverseName());
+ List<Dataset> datasets =
+ MetadataManager.INSTANCE.getDataverseDatasets(mdTxnCtx, dataverse.getDataverseName());
for (Dataset dataset : datasets) {
recoverDataset(mdTxnCtx, dataset);
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
index e2724e2..54a69eb 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/bootstrap/MetadataRecordTypes.java
@@ -384,7 +384,6 @@
public static final int FEED_CONN_APPLIED_FUNCTIONS_FIELD_INDEX = 4;
public static final int FEED_CONN_POLICY_FIELD_INDEX = 5;
-
public static final ARecordType FEED_CONNECTION_RECORDTYPE = createRecordType(
// RecordTypeName
RECORD_NAME_FEED_CONNECTION,
@@ -393,7 +392,7 @@
FIELD_NAME_RETURN_TYPE, FIELD_NAME_APPLIED_FUNCTIONS, FIELD_NAME_POLICY_NAME },
// FieldTypes
new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
- new AUnorderedListType(BuiltinType.ASTRING, null), BuiltinType.ASTRING},
+ new AUnorderedListType(BuiltinType.ASTRING, null), BuiltinType.ASTRING },
//IsOpen?
true);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/AbstractClusterManagementWork.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/AbstractClusterManagementWork.java
index 3c9e2d2..c6ef6e1 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/AbstractClusterManagementWork.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/AbstractClusterManagementWork.java
@@ -39,8 +39,6 @@
this.workId = WorkIdGenerator.getNextWorkId();
}
-
-
private static class WorkIdGenerator {
private static AtomicInteger workId = new AtomicInteger(0);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManagementWorkResponse.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManagementWorkResponse.java
index 61a35e3..396ada6 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManagementWorkResponse.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/cluster/ClusterManagementWorkResponse.java
@@ -32,7 +32,6 @@
this.status = Status.IN_PROGRESS;
}
-
@Override
public IClusterManagementWork getWork() {
return work;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSource.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSource.java
index ca22567..66283d6 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSource.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DataSource.java
@@ -64,8 +64,8 @@
}
}
- public DataSource(DataSourceId id, IAType itemType, IAType metaItemType, byte datasourceType,
- INodeDomain domain) throws AlgebricksException {
+ public DataSource(DataSourceId id, IAType itemType, IAType metaItemType, byte datasourceType, INodeDomain domain)
+ throws AlgebricksException {
this.id = id;
this.itemType = itemType;
this.metaItemType = metaItemType;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java
index 3f3a27f..d682484 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/DatasetDataSource.java
@@ -48,8 +48,7 @@
private Dataset dataset;
public DatasetDataSource(DataSourceId id, Dataset dataset, IAType itemType, IAType metaItemType,
- byte datasourceType, IDatasetDetails datasetDetails, INodeDomain datasetDomain)
- throws AlgebricksException {
+ byte datasourceType, IDatasetDetails datasetDetails, INodeDomain datasetDomain) throws AlgebricksException {
super(id, itemType, metaItemType, datasourceType, datasetDomain);
this.dataset = dataset;
switch (dataset.getDatasetType()) {
@@ -116,9 +115,9 @@
int[] minFilterFieldIndexes = createFilterIndexes(minFilterVars, opSchema);
int[] maxFilterFieldIndexes = createFilterIndexes(maxFilterVars, opSchema);
- return metadataProvider.buildBtreeRuntime(jobSpec, opSchema, typeEnv, context, true,
- false, ((DatasetDataSource) dataSource).getDataset(), primaryIndex.getIndexName(), null, null,
- true, true, false, minFilterFieldIndexes, maxFilterFieldIndexes);
+ return metadataProvider.buildBtreeRuntime(jobSpec, opSchema, typeEnv, context, true, false,
+ ((DatasetDataSource) dataSource).getDataset(), primaryIndex.getIndexName(), null, null, true,
+ true, false, minFilterFieldIndexes, maxFilterFieldIndexes);
default:
throw new AlgebricksException("Unknown datasource type");
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
index 6f58b0a..62337ad 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/MetadataProvider.java
@@ -297,8 +297,8 @@
}
public Dataset findDataset(String dataverse, String dataset) throws AlgebricksException {
- String dv = dataverse == null ? (defaultDataverse == null ? null : defaultDataverse.getDataverseName())
- : dataverse;
+ String dv =
+ dataverse == null ? (defaultDataverse == null ? null : defaultDataverse.getDataverseName()) : dataverse;
if (dv == null) {
return null;
}
@@ -410,10 +410,10 @@
public Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> buildFeedIntakeRuntime(
JobSpecification jobSpec, Feed feed, FeedPolicyAccessor policyAccessor) throws Exception {
Triple<IAdapterFactory, RecordDescriptor, IDataSourceAdapter.AdapterType> factoryOutput;
- factoryOutput = FeedMetadataUtil.getFeedFactoryAndOutput(feed, policyAccessor, mdTxnCtx,
- getApplicationContext());
- ARecordType recordType = FeedMetadataUtil.getOutputType(feed,
- feed.getConfiguration().get(ExternalDataConstants.KEY_TYPE_NAME));
+ factoryOutput =
+ FeedMetadataUtil.getFeedFactoryAndOutput(feed, policyAccessor, mdTxnCtx, getApplicationContext());
+ ARecordType recordType =
+ FeedMetadataUtil.getOutputType(feed, feed.getConfiguration().get(ExternalDataConstants.KEY_TYPE_NAME));
IAdapterFactory adapterFactory = factoryOutput.first;
FeedIntakeOperatorDescriptor feedIngestor = null;
switch (factoryOutput.third) {
@@ -446,14 +446,12 @@
if (primaryIndex != null && (dataset.getDatasetType() != DatasetType.EXTERNAL)) {
isSecondary = !indexName.equals(primaryIndex.getIndexName());
}
- Index theIndex = isSecondary
- ? MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(), dataset.getDatasetName(),
- indexName)
- : primaryIndex;
+ Index theIndex = isSecondary ? MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
+ dataset.getDatasetName(), indexName) : primaryIndex;
int numPrimaryKeys = dataset.getPrimaryKeys().size();
RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc = getSplitProviderAndConstraints(dataset,
- theIndex.getIndexName());
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc =
+ getSplitProviderAndConstraints(dataset, theIndex.getIndexName());
int[] primaryKeyFields = new int[numPrimaryKeys];
for (int i = 0; i < numPrimaryKeys; i++) {
primaryKeyFields[i] = i;
@@ -491,8 +489,8 @@
"Code generation error: no index " + indexName + " for dataset " + dataset.getDatasetName());
}
RecordDescriptor outputRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc = getSplitProviderAndConstraints(dataset,
- secondaryIndex.getIndexName());
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc =
+ getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
int[] primaryKeyFields = new int[numPrimaryKeys];
for (int i = 0; i < numPrimaryKeys; i++) {
primaryKeyFields[i] = i;
@@ -501,8 +499,8 @@
ISearchOperationCallbackFactory searchCallbackFactory = dataset.getSearchCallbackFactory(
storageComponentProvider, secondaryIndex, IndexOperation.SEARCH, primaryKeyFields);
RTreeSearchOperatorDescriptor rtreeSearchOp;
- IIndexDataflowHelperFactory indexDataflowHelperFactory = new IndexDataflowHelperFactory(
- storageComponentProvider.getStorageManager(), spPc.first);
+ IIndexDataflowHelperFactory indexDataflowHelperFactory =
+ new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), spPc.first);
if (dataset.getDatasetType() == DatasetType.INTERNAL) {
rtreeSearchOp = new RTreeSearchOperatorDescriptor(jobSpec, outputRecDesc, keyFields, true, true,
indexDataflowHelperFactory, retainInput, retainMissing, context.getMissingWriterFactory(),
@@ -527,8 +525,8 @@
File outFile = new File(fs.getPath());
String nodeId = fs.getNodeName();
- SinkWriterRuntimeFactory runtime = new SinkWriterRuntimeFactory(printColumns, printerFactories, outFile,
- getWriterFactory(), inputDesc);
+ SinkWriterRuntimeFactory runtime =
+ new SinkWriterRuntimeFactory(printColumns, printerFactories, outFile, getWriterFactory(), inputDesc);
AlgebricksPartitionConstraint apc = new AlgebricksAbsolutePartitionConstraint(new String[] { nodeId });
return new Pair<>(runtime, apc);
}
@@ -577,16 +575,16 @@
fieldPermutation[numKeys + 1] = idx;
}
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = getSplitProviderAndConstraints(
- dataset);
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
+ getSplitProviderAndConstraints(dataset);
long numElementsHint = getCardinalityPerPartitionHint(dataset);
// TODO
// figure out the right behavior of the bulkload and then give the
// right callback
// (ex. what's the expected behavior when there is an error during
// bulkload?)
- IIndexDataflowHelperFactory indexHelperFactory = new IndexDataflowHelperFactory(
- storageComponentProvider.getStorageManager(), splitsAndConstraint.first);
+ IIndexDataflowHelperFactory indexHelperFactory =
+ new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), splitsAndConstraint.first);
LSMIndexBulkLoadOperatorDescriptor btreeBulkLoad = new LSMIndexBulkLoadOperatorDescriptor(spec, null,
fieldPermutation, GlobalConfig.DEFAULT_TREE_FILL_FACTOR, false, numElementsHint, true,
indexHelperFactory, null, BulkLoadUsage.LOAD, dataset.getDatasetId());
@@ -703,8 +701,8 @@
numElementsHint = Long.parseLong(numElementsHintString);
}
int numPartitions = 0;
- List<String> nodeGroup = MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName())
- .getNodeNames();
+ List<String> nodeGroup =
+ MetadataManager.INSTANCE.getNodegroup(mdTxnCtx, dataset.getNodeGroupName()).getNodeNames();
IClusterStateManager csm = appCtx.getClusterStateManager();
for (String nd : nodeGroup) {
numPartitions += csm.getNodePartitionsCount(nd);
@@ -720,9 +718,9 @@
getApplicationContext().getServiceContext(), adapterName, configuration, itemType, metaType);
// check to see if dataset is indexed
- Index filesIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
- dataset.getDatasetName(),
- dataset.getDatasetName().concat(IndexingConstants.EXTERNAL_FILE_INDEX_NAME_SUFFIX));
+ Index filesIndex =
+ MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(), dataset.getDatasetName(),
+ dataset.getDatasetName().concat(IndexingConstants.EXTERNAL_FILE_INDEX_NAME_SUFFIX));
if (filesIndex != null && filesIndex.getPendingOp() == 0) {
// get files
@@ -784,27 +782,26 @@
MetadataProvider metadataProvider, boolean retainMissing) throws AlgebricksException {
try {
// Get data type
- ARecordType itemType = (ARecordType) MetadataManager.INSTANCE
- .getDatatype(metadataProvider.getMetadataTxnContext(), dataset.getDataverseName(),
- dataset.getItemTypeName())
- .getDatatype();
+ ARecordType itemType =
+ (ARecordType) MetadataManager.INSTANCE.getDatatype(metadataProvider.getMetadataTxnContext(),
+ dataset.getDataverseName(), dataset.getItemTypeName()).getDatatype();
ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
LookupAdapterFactory<?> adapterFactory = AdapterFactoryProvider.getLookupAdapterFactory(
getApplicationContext().getServiceContext(), datasetDetails.getProperties(), itemType, ridIndexes,
retainInput, retainMissing, context.getMissingWriterFactory());
String fileIndexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName());
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc = metadataProvider
- .getSplitProviderAndConstraints(dataset, fileIndexName);
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> spPc =
+ metadataProvider.getSplitProviderAndConstraints(dataset, fileIndexName);
Index fileIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
dataset.getDatasetName(), fileIndexName);
// Create the file index data flow helper
- IIndexDataflowHelperFactory indexDataflowHelperFactory = new IndexDataflowHelperFactory(
- storageComponentProvider.getStorageManager(), spPc.first);
+ IIndexDataflowHelperFactory indexDataflowHelperFactory =
+ new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), spPc.first);
// Create the out record descriptor, appContext and fileSplitProvider for the
// files index
RecordDescriptor outRecDesc = JobGenHelper.mkRecordDescriptor(typeEnv, opSchema, context);
- ISearchOperationCallbackFactory searchOpCallbackFactory = dataset
- .getSearchCallbackFactory(storageComponentProvider, fileIndex, IndexOperation.SEARCH, null);
+ ISearchOperationCallbackFactory searchOpCallbackFactory =
+ dataset.getSearchCallbackFactory(storageComponentProvider, fileIndex, IndexOperation.SEARCH, null);
// Create the operator
ExternalLookupOperatorDescriptor op = new ExternalLookupOperatorDescriptor(jobSpec, adapterFactory,
outRecDesc, indexDataflowHelperFactory, searchOpCallbackFactory,
@@ -865,12 +862,12 @@
throw new AlgebricksException("Can only scan datasets of records.");
}
- ISerializerDeserializer<?> payloadSerde = getDataFormat().getSerdeProvider()
- .getSerializerDeserializer(itemType);
+ ISerializerDeserializer<?> payloadSerde =
+ getDataFormat().getSerdeProvider().getSerializerDeserializer(itemType);
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] { payloadSerde });
- ExternalScanOperatorDescriptor dataScanner = new ExternalScanOperatorDescriptor(jobSpec, scannerDesc,
- adapterFactory);
+ ExternalScanOperatorDescriptor dataScanner =
+ new ExternalScanOperatorDescriptor(jobSpec, scannerDesc, adapterFactory);
AlgebricksPartitionConstraint constraint;
try {
@@ -896,9 +893,9 @@
int i = 0;
for (; i < sidxKeyFieldCount; ++i) {
- Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(sidxKeyFieldTypes.get(i),
- sidxKeyFieldNames.get(i),
- (hasMeta && secondaryIndexIndicators.get(i).intValue() == 1) ? metaType : recType);
+ Pair<IAType, Boolean> keyPairType =
+ Index.getNonNullableOpenFieldType(sidxKeyFieldTypes.get(i), sidxKeyFieldNames.get(i),
+ (hasMeta && secondaryIndexIndicators.get(i).intValue() == 1) ? metaType : recType);
IAType keyType = keyPairType.first;
comparatorFactories[i] = BinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(keyType, true);
typeTraits[i] = TypeTraitProvider.INSTANCE.getTypeTrait(keyType);
@@ -936,8 +933,8 @@
List<LogicalVariable> additionalNonFilteringFields) throws AlgebricksException {
String datasetName = dataSource.getId().getDatasourceName();
- Dataset dataset = MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataSource.getId().getDataverseName(),
- datasetName);
+ Dataset dataset =
+ MetadataManagerUtil.findExistingDataset(mdTxnCtx, dataSource.getId().getDataverseName(), datasetName);
int numKeys = keys.size();
int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
// Move key fields to front.
@@ -965,8 +962,8 @@
Index primaryIndex = MetadataManager.INSTANCE.getIndex(mdTxnCtx, dataset.getDataverseName(),
dataset.getDatasetName(), dataset.getDatasetName());
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = getSplitProviderAndConstraints(
- dataset);
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
+ getSplitProviderAndConstraints(dataset);
// prepare callback
int[] primaryKeyFields = new int[numKeys];
@@ -975,8 +972,8 @@
}
IModificationOperationCallbackFactory modificationCallbackFactory = dataset
.getModificationCallbackFactory(storageComponentProvider, primaryIndex, indexOp, primaryKeyFields);
- IIndexDataflowHelperFactory idfh = new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(),
- splitsAndConstraint.first);
+ IIndexDataflowHelperFactory idfh =
+ new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), splitsAndConstraint.first);
IOperatorDescriptor op;
if (bulkload) {
long numElementsHint = getCardinalityPerPartitionHint(dataset);
@@ -1135,8 +1132,8 @@
dataset.getDatasetName(), indexName);
List<List<String>> secondaryKeyExprs = secondaryIndex.getKeyFieldNames();
List<IAType> secondaryKeyTypes = secondaryIndex.getKeyFieldTypes();
- Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0),
- secondaryKeyExprs.get(0), recType);
+ Pair<IAType, Boolean> keyPairType =
+ Index.getNonNullableOpenFieldType(secondaryKeyTypes.get(0), secondaryKeyExprs.get(0), recType);
IAType spatialType = keyPairType.first;
int dimension = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
int numSecondaryKeys = dimension * 2;
@@ -1189,14 +1186,14 @@
prevFieldPermutation[numKeys] = idx;
}
}
- Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = getSplitProviderAndConstraints(
- dataset, secondaryIndex.getIndexName());
+ Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
+ getSplitProviderAndConstraints(dataset, secondaryIndex.getIndexName());
// prepare callback
IModificationOperationCallbackFactory modificationCallbackFactory = dataset.getModificationCallbackFactory(
storageComponentProvider, secondaryIndex, indexOp, modificationCallbackPrimaryKeyFields);
- IIndexDataflowHelperFactory indexDataflowHelperFactory = new IndexDataflowHelperFactory(
- storageComponentProvider.getStorageManager(), splitsAndConstraint.first);
+ IIndexDataflowHelperFactory indexDataflowHelperFactory =
+ new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), splitsAndConstraint.first);
IOperatorDescriptor op;
if (bulkload) {
long numElementsHint = getCardinalityPerPartitionHint(dataset);
@@ -1517,8 +1514,8 @@
return null;
}
IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
- IScalarEvaluatorFactory filterEvalFactory = expressionRuntimeProvider.createEvaluatorFactory(filterExpr,
- typeEnv, inputSchemas, context);
+ IScalarEvaluatorFactory filterEvalFactory =
+ expressionRuntimeProvider.createEvaluatorFactory(filterExpr, typeEnv, inputSchemas, context);
return new AsterixTupleFilterFactory(filterEvalFactory, context.getBinaryBooleanInspectorFactory());
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
index 60dd158..ea2d715 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/Dataset.java
@@ -586,16 +586,15 @@
* If the callback factory could not be created
*/
public IModificationOperationCallbackFactory getModificationCallbackFactory(
- IStorageComponentProvider componentProvider, Index index, IndexOperation op,
- int[] primaryKeyFields) throws AlgebricksException {
+ IStorageComponentProvider componentProvider, Index index, IndexOperation op, int[] primaryKeyFields)
+ throws AlgebricksException {
if (index.isPrimaryIndex()) {
return op == IndexOperation.UPSERT ? new UpsertOperationCallbackFactory(getDatasetId(), primaryKeyFields,
- componentProvider.getTransactionSubsystemProvider(), Operation.get(op),
- index.resourceType())
+ componentProvider.getTransactionSubsystemProvider(), Operation.get(op), index.resourceType())
: op == IndexOperation.DELETE || op == IndexOperation.INSERT
- ? new PrimaryIndexModificationOperationCallbackFactory(getDatasetId(),
- primaryKeyFields, componentProvider.getTransactionSubsystemProvider(),
- Operation.get(op), index.resourceType())
+ ? new PrimaryIndexModificationOperationCallbackFactory(getDatasetId(), primaryKeyFields,
+ componentProvider.getTransactionSubsystemProvider(), Operation.get(op),
+ index.resourceType())
: NoOpOperationCallbackFactory.INSTANCE;
} else {
return op == IndexOperation.DELETE || op == IndexOperation.INSERT || op == IndexOperation.UPSERT
@@ -656,8 +655,8 @@
public IPushRuntimeFactory getCommitRuntimeFactory(MetadataProvider metadataProvider,
int[] primaryKeyFieldPermutation, boolean isSink) throws AlgebricksException {
int[] datasetPartitions = getDatasetPartitions(metadataProvider);
- return new CommitRuntimeFactory(datasetId, primaryKeyFieldPermutation,
- metadataProvider.isWriteTransaction(), datasetPartitions, isSink);
+ return new CommitRuntimeFactory(datasetId, primaryKeyFieldPermutation, metadataProvider.isWriteTransaction(),
+ datasetPartitions, isSink);
}
public IFrameOperationCallbackFactory getFrameOpCallbackFactory() {
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java
index 2f9f9cb..48d2908 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entities/ExternalDatasetDetails.java
@@ -89,12 +89,12 @@
externalRecordBuilder.reset(MetadataRecordTypes.EXTERNAL_DETAILS_RECORDTYPE);
AMutableString aString = new AMutableString("");
- ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
- ISerializerDeserializer<ADateTime> dateTimeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATETIME);
- ISerializerDeserializer<AInt32> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
+ ISerializerDeserializer<ADateTime> dateTimeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
+ ISerializerDeserializer<AInt32> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
// write field 0
fieldValue.reset();
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/AbstractTupleTranslator.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/AbstractTupleTranslator.java
index 2d2f895..e9e6366 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/AbstractTupleTranslator.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/entitytupletranslators/AbstractTupleTranslator.java
@@ -41,14 +41,14 @@
private static final long serialVersionUID = 1L;
protected AMutableString aString = new AMutableString("");
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
+ protected ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ABoolean> booleanSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ protected ISerializerDeserializer<ABoolean> booleanSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt32> int32Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ protected ISerializerDeserializer<AInt32> int32Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
protected final transient IARecordBuilder recordBuilder;
protected final transient ArrayBackedValueStorage fieldValue;
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
index b2a7ca6..8a0cf84 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
@@ -275,8 +275,7 @@
}
}
- public static ARecordType getOutputType(IFeed feed, String fqOutputType)
- throws AlgebricksException {
+ public static ARecordType getOutputType(IFeed feed, String fqOutputType) throws AlgebricksException {
ARecordType outputType = null;
if (fqOutputType == null) {
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalScalarFunctionInfo.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalScalarFunctionInfo.java
index 8625756..93165dc 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalScalarFunctionInfo.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/functions/ExternalScalarFunctionInfo.java
@@ -29,10 +29,9 @@
private static final long serialVersionUID = 1L;
- public ExternalScalarFunctionInfo(String namespace, String name, int arity, IAType
- returnType,
- String body, String language, List<IAType> argumentTypes, IResultTypeComputer rtc) {
- super(namespace, name, arity , FunctionKind.SCALAR, argumentTypes, returnType, rtc, body, language);
+ public ExternalScalarFunctionInfo(String namespace, String name, int arity, IAType returnType, String body,
+ String language, List<IAType> argumentTypes, IResultTypeComputer rtc) {
+ super(namespace, name, arity, FunctionKind.SCALAR, argumentTypes, returnType, rtc, body, language);
}
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
index c34cc3c..6d81145 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/DatasetUtil.java
@@ -350,9 +350,8 @@
int[] highKeyFields = null;
ITransactionSubsystemProvider txnSubsystemProvider = TransactionSubsystemProvider.INSTANCE;
ISearchOperationCallbackFactory searchCallbackFactory = new PrimaryIndexInstantSearchOperationCallbackFactory(
- dataset.getDatasetId(),
- dataset.getPrimaryBloomFilterFields(), txnSubsystemProvider,
- IRecoveryManager.ResourceType.LSM_BTREE);
+ dataset.getDatasetId(), dataset.getPrimaryBloomFilterFields(), txnSubsystemProvider,
+ IRecoveryManager.ResourceType.LSM_BTREE);
IndexDataflowHelperFactory indexHelperFactory = new IndexDataflowHelperFactory(
metadataProvider.getStorageComponentProvider().getStorageManager(), primaryFileSplitProvider);
BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec,
@@ -557,8 +556,7 @@
if (i > 0 && i < datasetArg.length() - 1) {
first = datasetArg.substring(0, i);
second = datasetArg.substring(i + 1);
- }
- else {
+ } else {
first = metadata.getDefaultDataverse() == null ? null : metadata.getDefaultDataverse().getDataverseName();
second = datasetArg;
}
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
index ef3346b..a3c3842 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/ExternalIndexingOperations.java
@@ -203,8 +203,8 @@
mergePolicyFactory, mergePolicyProperties);
IIndexBuilderFactory indexBuilderFactory = new IndexBuilderFactory(storageComponentProvider.getStorageManager(),
secondaryFileSplitProvider, resourceFactory, true);
- IIndexDataflowHelperFactory dataflowHelperFactory =
- new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), secondaryFileSplitProvider);
+ IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(
+ storageComponentProvider.getStorageManager(), secondaryFileSplitProvider);
ExternalFilesIndexCreateOperatorDescriptor externalFilesOp = new ExternalFilesIndexCreateOperatorDescriptor(
spec, indexBuilderFactory, dataflowHelperFactory, externalFilesSnapshot);
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, externalFilesOp,
@@ -221,8 +221,8 @@
Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
.getSplitProviderAndConstraints(dataset, IndexingConstants.getFilesIndexName(dataset.getDatasetName()));
IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
- IIndexDataflowHelperFactory dataflowHelperFactory =
- new IndexDataflowHelperFactory(storageComponentProvider.getStorageManager(), secondaryFileSplitProvider);
+ IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(
+ storageComponentProvider.getStorageManager(), secondaryFileSplitProvider);
ExternalFilesIndexModificationOperatorDescriptor externalFilesOp =
new ExternalFilesIndexModificationOperatorDescriptor(spec, dataflowHelperFactory,
externalFilesSnapshot);
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryCorrelatedTreeIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryCorrelatedTreeIndexOperationsHelper.java
index 0a772fa..48982c0 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryCorrelatedTreeIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryCorrelatedTreeIndexOperationsHelper.java
@@ -263,8 +263,8 @@
LSMSecondaryIndexBulkLoadOperatorDescriptor treeIndexBulkLoadOp =
new LSMSecondaryIndexBulkLoadOperatorDescriptor(spec, taggedSecondaryRecDesc, primaryIndexHelperFactory,
- secondaryIndexHelperFactory, fieldPermutation, NUM_TAG_FIELDS, numSecondaryKeys,
- numPrimaryKeys, hasBuddyBtree);
+ secondaryIndexHelperFactory, fieldPermutation, NUM_TAG_FIELDS, numSecondaryKeys, numPrimaryKeys,
+ hasBuddyBtree);
AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, treeIndexBulkLoadOp,
secondaryPartitionConstraint);
return treeIndexBulkLoadOp;
@@ -274,9 +274,8 @@
MetadataProvider metadataProvider, RecordDescriptor outRecDesc) throws AlgebricksException {
ITransactionSubsystemProvider txnSubsystemProvider = TransactionSubsystemProvider.INSTANCE;
ISearchOperationCallbackFactory searchCallbackFactory = new PrimaryIndexInstantSearchOperationCallbackFactory(
- dataset.getDatasetId(),
- dataset.getPrimaryBloomFilterFields(), txnSubsystemProvider,
- IRecoveryManager.ResourceType.LSM_BTREE);
+ dataset.getDatasetId(), dataset.getPrimaryBloomFilterFields(), txnSubsystemProvider,
+ IRecoveryManager.ResourceType.LSM_BTREE);
IndexDataflowHelperFactory indexHelperFactory = new IndexDataflowHelperFactory(
metadataProvider.getStorageComponentProvider().getStorageManager(), primaryFileSplitProvider);
LSMBTreeDiskComponentScanOperatorDescriptor primaryScanOp = new LSMBTreeDiskComponentScanOperatorDescriptor(
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java
index 7701f65..065eb72 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryIndexOperationsHelper.java
@@ -147,19 +147,19 @@
SecondaryIndexOperationsHelper indexOperationsHelper;
switch (index.getIndexType()) {
case BTREE:
- indexOperationsHelper = new SecondaryBTreeOperationsHelper(dataset, index, physOptConf,
- metadataProvider);
+ indexOperationsHelper =
+ new SecondaryBTreeOperationsHelper(dataset, index, physOptConf, metadataProvider);
break;
case RTREE:
- indexOperationsHelper = new SecondaryRTreeOperationsHelper(dataset, index, physOptConf,
- metadataProvider);
+ indexOperationsHelper =
+ new SecondaryRTreeOperationsHelper(dataset, index, physOptConf, metadataProvider);
break;
case SINGLE_PARTITION_WORD_INVIX:
case SINGLE_PARTITION_NGRAM_INVIX:
case LENGTH_PARTITIONED_WORD_INVIX:
case LENGTH_PARTITIONED_NGRAM_INVIX:
- indexOperationsHelper = new SecondaryInvertedIndexOperationsHelper(dataset, index, physOptConf,
- metadataProvider);
+ indexOperationsHelper =
+ new SecondaryInvertedIndexOperationsHelper(dataset, index, physOptConf, metadataProvider);
break;
default:
throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_INDEX_TYPE, index.getIndexType());
@@ -265,7 +265,6 @@
protected abstract void setSecondaryRecDescAndComparators() throws AlgebricksException;
-
protected AlgebricksMetaOperatorDescriptor createAssignOp(JobSpecification spec, int numSecondaryKeyFields,
RecordDescriptor secondaryRecDesc) throws AlgebricksException {
int[] outColumns = new int[numSecondaryKeyFields + numFilterFields];
diff --git a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
index 1c9eb74..d31ca3b 100644
--- a/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
+++ b/asterixdb/asterix-metadata/src/main/java/org/apache/asterix/metadata/utils/SecondaryInvertedIndexOperationsHelper.java
@@ -212,8 +212,7 @@
IOperatorDescriptor keyProviderOp = DatasetUtil.createDummyKeyProviderOp(spec, dataset, metadataProvider);
// Create primary index scan op.
- IOperatorDescriptor primaryScanOp =
- DatasetUtil.createPrimaryIndexScanOp(spec, metadataProvider, dataset);
+ IOperatorDescriptor primaryScanOp = DatasetUtil.createPrimaryIndexScanOp(spec, metadataProvider, dataset);
IOperatorDescriptor sourceOp = primaryScanOp;
boolean isOverridingKeyFieldTypes = index.isOverridingKeyFieldTypes();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
index 797901e..975b4f4 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/builders/RecordBuilder.java
@@ -80,10 +80,10 @@
this.openFieldNameLengths = new int[DEFAULT_NUM_OPEN_FIELDS];
this.numberOfOpenFields = 0;
- this.utf8HashFunction = new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY)
- .createBinaryHashFunction();
- this.utf8Comparator = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY)
- .createBinaryComparator();
+ this.utf8HashFunction =
+ new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY).createBinaryHashFunction();
+ this.utf8Comparator =
+ new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY).createBinaryComparator();
this.openPartOffsetArray = null;
this.openPartOffsetArraySize = 0;
@@ -195,11 +195,11 @@
}
if (numberOfOpenFields == openPartOffsets.length) {
openPartOffsets = Arrays.copyOf(openPartOffsets, openPartOffsets.length + DEFAULT_NUM_OPEN_FIELDS);
- openFieldNameLengths = Arrays.copyOf(openFieldNameLengths,
- openFieldNameLengths.length + DEFAULT_NUM_OPEN_FIELDS);
+ openFieldNameLengths =
+ Arrays.copyOf(openFieldNameLengths, openFieldNameLengths.length + DEFAULT_NUM_OPEN_FIELDS);
}
- int fieldNameHashCode = utf8HashFunction.hash(name.getByteArray(), name.getStartOffset() + 1,
- name.getLength() - 1);
+ int fieldNameHashCode =
+ utf8HashFunction.hash(name.getByteArray(), name.getStartOffset() + 1, name.getLength() - 1);
if (recType != null) {
int cFieldPos;
cFieldPos = recTypeInfo.getFieldIndex(name.getByteArray(), name.getStartOffset() + 1, name.getLength() - 1);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/common/PartialAggregationTypeComputer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/common/PartialAggregationTypeComputer.java
index 1a73af3..4a0384d 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/common/PartialAggregationTypeComputer.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/common/PartialAggregationTypeComputer.java
@@ -38,8 +38,8 @@
if (partialFid.equals(BuiltinFunctions.SERIAL_GLOBAL_AVG)) {
partialFid = BuiltinFunctions.SERIAL_LOCAL_AVG;
}
- AggregateFunctionCallExpression partialAgg = BuiltinFunctions.makeAggregateFunctionExpression(partialFid,
- agg.getArguments());
+ AggregateFunctionCallExpression partialAgg =
+ BuiltinFunctions.makeAggregateFunctionExpression(partialFid, agg.getArguments());
return getTypeForFunction(partialAgg, env, metadataProvider);
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ACirclePartialBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ACirclePartialBinaryComparatorFactory.java
index a5709ba..961b4dd 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ACirclePartialBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ACirclePartialBinaryComparatorFactory.java
@@ -49,24 +49,24 @@
int c = Double
.compare(
ADoubleSerializerDeserializer.getDouble(
- b1,
- s1
- + ACircleSerializerDeserializer
- .getCenterPointCoordinateOffset(Coordinate.X) - 1),
- ADoubleSerializerDeserializer.getDouble(
- b2,
- s2
- + ACircleSerializerDeserializer
- .getCenterPointCoordinateOffset(Coordinate.X) - 1));
+ b1, s1 + ACircleSerializerDeserializer
+ .getCenterPointCoordinateOffset(Coordinate.X) - 1),
+ ADoubleSerializerDeserializer.getDouble(b2, s2
+ + ACircleSerializerDeserializer.getCenterPointCoordinateOffset(Coordinate.X)
+ - 1));
if (c == 0) {
// center.y
- c = Double.compare(
- ADoubleSerializerDeserializer.getDouble(b1,
- s1 + ACircleSerializerDeserializer.getCenterPointCoordinateOffset(Coordinate.Y)
- - 1),
- ADoubleSerializerDeserializer.getDouble(b2,
- s2 + ACircleSerializerDeserializer.getCenterPointCoordinateOffset(Coordinate.Y)
- - 1));
+ c = Double
+ .compare(
+ ADoubleSerializerDeserializer.getDouble(b1,
+ s1 + ACircleSerializerDeserializer
+ .getCenterPointCoordinateOffset(Coordinate.Y) - 1),
+ ADoubleSerializerDeserializer
+ .getDouble(
+ b2, s2
+ + ACircleSerializerDeserializer
+ .getCenterPointCoordinateOffset(Coordinate.Y)
+ - 1));
if (c == 0) {
// radius
return Double.compare(
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ADurationPartialBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ADurationPartialBinaryComparatorFactory.java
index da8696b..ed9c051 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ADurationPartialBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ADurationPartialBinaryComparatorFactory.java
@@ -28,7 +28,8 @@
private static final long serialVersionUID = 1L;
- public static final ADurationPartialBinaryComparatorFactory INSTANCE = new ADurationPartialBinaryComparatorFactory();
+ public static final ADurationPartialBinaryComparatorFactory INSTANCE =
+ new ADurationPartialBinaryComparatorFactory();
private ADurationPartialBinaryComparatorFactory() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AIntervalAscPartialBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AIntervalAscPartialBinaryComparatorFactory.java
index bb96d5d..fb2b112 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AIntervalAscPartialBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AIntervalAscPartialBinaryComparatorFactory.java
@@ -26,7 +26,8 @@
private static final long serialVersionUID = 1L;
- public static final AIntervalAscPartialBinaryComparatorFactory INSTANCE = new AIntervalAscPartialBinaryComparatorFactory();
+ public static final AIntervalAscPartialBinaryComparatorFactory INSTANCE =
+ new AIntervalAscPartialBinaryComparatorFactory();
private AIntervalAscPartialBinaryComparatorFactory() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AIntervalDescPartialBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AIntervalDescPartialBinaryComparatorFactory.java
index 5ee553c..32994ab 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AIntervalDescPartialBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AIntervalDescPartialBinaryComparatorFactory.java
@@ -26,7 +26,8 @@
private static final long serialVersionUID = 1L;
- public static final AIntervalDescPartialBinaryComparatorFactory INSTANCE = new AIntervalDescPartialBinaryComparatorFactory();
+ public static final AIntervalDescPartialBinaryComparatorFactory INSTANCE =
+ new AIntervalDescPartialBinaryComparatorFactory();
private AIntervalDescPartialBinaryComparatorFactory() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ALinePartialBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ALinePartialBinaryComparatorFactory.java
index 9eae119..405e063 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ALinePartialBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ALinePartialBinaryComparatorFactory.java
@@ -45,39 +45,50 @@
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
try {
- int c = Double.compare(
- ADoubleSerializerDeserializer.getDouble(b1,
- s1 + ALineSerializerDeserializer.getStartPointCoordinateOffset(Coordinate.X) - 1),
- ADoubleSerializerDeserializer.getDouble(b2,
- s2 + ALineSerializerDeserializer.getStartPointCoordinateOffset(Coordinate.X) - 1));
- if (c == 0) {
- c = Double.compare(
- ADoubleSerializerDeserializer.getDouble(b1,
- s1 + ALineSerializerDeserializer.getStartPointCoordinateOffset(Coordinate.Y)
- - 1),
- ADoubleSerializerDeserializer.getDouble(b2,
- s2 + ALineSerializerDeserializer.getStartPointCoordinateOffset(Coordinate.Y)
- - 1));
- if (c == 0) {
- c = Double.compare(
- ADoubleSerializerDeserializer.getDouble(b1,
- s1 + ALineSerializerDeserializer.getEndPointCoordinateOffset(Coordinate.X)
- - 1),
+ int c = Double
+ .compare(
+ ADoubleSerializerDeserializer.getDouble(
+ b1, s1 + ALineSerializerDeserializer
+ .getStartPointCoordinateOffset(Coordinate.X) - 1),
ADoubleSerializerDeserializer.getDouble(b2,
- s2 + ALineSerializerDeserializer.getEndPointCoordinateOffset(Coordinate.X)
+ s2 + ALineSerializerDeserializer.getStartPointCoordinateOffset(Coordinate.X)
- 1));
+ if (c == 0) {
+ c = Double
+ .compare(
+ ADoubleSerializerDeserializer.getDouble(b1,
+ s1 + ALineSerializerDeserializer
+ .getStartPointCoordinateOffset(Coordinate.Y) - 1),
+ ADoubleSerializerDeserializer
+ .getDouble(
+ b2, s2
+ + ALineSerializerDeserializer
+ .getStartPointCoordinateOffset(Coordinate.Y)
+ - 1));
+ if (c == 0) {
+ c = Double
+ .compare(
+ ADoubleSerializerDeserializer.getDouble(b1,
+ s1 + ALineSerializerDeserializer
+ .getEndPointCoordinateOffset(Coordinate.X) - 1),
+ ADoubleSerializerDeserializer
+ .getDouble(
+ b2, s2
+ + ALineSerializerDeserializer
+ .getEndPointCoordinateOffset(Coordinate.X)
+ - 1));
if (c == 0) {
- return Double.compare(
- ADoubleSerializerDeserializer.getDouble(
- b1,
- s1
- + ALineSerializerDeserializer
- .getEndPointCoordinateOffset(Coordinate.Y) - 1),
- ADoubleSerializerDeserializer.getDouble(
- b2,
- s2
- + ALineSerializerDeserializer
- .getEndPointCoordinateOffset(Coordinate.Y) - 1));
+ return Double
+ .compare(
+ ADoubleSerializerDeserializer
+ .getDouble(b1,
+ s1 + ALineSerializerDeserializer
+ .getEndPointCoordinateOffset(Coordinate.Y) - 1),
+ ADoubleSerializerDeserializer
+ .getDouble(b2,
+ s2 + ALineSerializerDeserializer
+ .getEndPointCoordinateOffset(Coordinate.Y)
+ - 1));
}
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
index e74d5ec..61b47c2 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AObjectAscBinaryComparatorFactory.java
@@ -57,51 +57,51 @@
// BOOLEAN
final IBinaryComparator ascBoolComp = BooleanBinaryComparatorFactory.INSTANCE.createBinaryComparator();
// TINYINT
- final IBinaryComparator ascByteComp = new PointableBinaryComparatorFactory(BytePointable.FACTORY)
- .createBinaryComparator();
+ final IBinaryComparator ascByteComp =
+ new PointableBinaryComparatorFactory(BytePointable.FACTORY).createBinaryComparator();
// SMALLINT
- final IBinaryComparator ascShortComp = new PointableBinaryComparatorFactory(ShortPointable.FACTORY)
- .createBinaryComparator();
+ final IBinaryComparator ascShortComp =
+ new PointableBinaryComparatorFactory(ShortPointable.FACTORY).createBinaryComparator();
// INTEGER
- final IBinaryComparator ascIntComp = new PointableBinaryComparatorFactory(IntegerPointable.FACTORY)
- .createBinaryComparator();
+ final IBinaryComparator ascIntComp =
+ new PointableBinaryComparatorFactory(IntegerPointable.FACTORY).createBinaryComparator();
// BIGINT
final IBinaryComparator ascLongComp = LongBinaryComparatorFactory.INSTANCE.createBinaryComparator();
// STRING
- final IBinaryComparator ascStrComp = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY)
- .createBinaryComparator();
+ final IBinaryComparator ascStrComp =
+ new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY).createBinaryComparator();
// BINARY
- final IBinaryComparator ascByteArrayComp = new PointableBinaryComparatorFactory(ByteArrayPointable.FACTORY)
- .createBinaryComparator();
+ final IBinaryComparator ascByteArrayComp =
+ new PointableBinaryComparatorFactory(ByteArrayPointable.FACTORY).createBinaryComparator();
// FLOAT
- final IBinaryComparator ascFloatComp = new PointableBinaryComparatorFactory(FloatPointable.FACTORY)
- .createBinaryComparator();
+ final IBinaryComparator ascFloatComp =
+ new PointableBinaryComparatorFactory(FloatPointable.FACTORY).createBinaryComparator();
// DOUBLE
- final IBinaryComparator ascDoubleComp = new PointableBinaryComparatorFactory(DoublePointable.FACTORY)
- .createBinaryComparator();
+ final IBinaryComparator ascDoubleComp =
+ new PointableBinaryComparatorFactory(DoublePointable.FACTORY).createBinaryComparator();
// RECTANGLE
- final IBinaryComparator ascRectangleComp = ARectanglePartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ final IBinaryComparator ascRectangleComp =
+ ARectanglePartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
// CIRCLE
- final IBinaryComparator ascCircleComp = ACirclePartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ final IBinaryComparator ascCircleComp =
+ ACirclePartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
// DURATION
- final IBinaryComparator ascDurationComp = ADurationPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ final IBinaryComparator ascDurationComp =
+ ADurationPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
// INTERVAL
- final IBinaryComparator ascIntervalComp = AIntervalAscPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ final IBinaryComparator ascIntervalComp =
+ AIntervalAscPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
// LINE
final IBinaryComparator ascLineComp = ALinePartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
// POINT
- final IBinaryComparator ascPointComp = APointPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ final IBinaryComparator ascPointComp =
+ APointPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
// POINT3D
- final IBinaryComparator ascPoint3DComp = APoint3DPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ final IBinaryComparator ascPoint3DComp =
+ APoint3DPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
// POLYGON
- final IBinaryComparator ascPolygonComp = APolygonPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ final IBinaryComparator ascPolygonComp =
+ APolygonPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
// UUID
final IBinaryComparator ascUUIDComp = AUUIDPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
// RAW
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AObjectDescBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AObjectDescBinaryComparatorFactory.java
index 5db1bcd..3260527 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AObjectDescBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AObjectDescBinaryComparatorFactory.java
@@ -41,8 +41,8 @@
// INTERVAL
// Interval asc and desc comparator factories are not the inverse of each other.
// Thus, we need to specify the interval desc comparator factory for descending comparisons.
- final IBinaryComparator descIntervalComp = AIntervalDescPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ final IBinaryComparator descIntervalComp =
+ AIntervalDescPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) throws HyracksDataException {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/APoint3DPartialBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/APoint3DPartialBinaryComparatorFactory.java
index de07035..dfba1d1 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/APoint3DPartialBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/APoint3DPartialBinaryComparatorFactory.java
@@ -45,21 +45,29 @@
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
try {
- int c = Double.compare(
- ADoubleSerializerDeserializer.getDouble(b1,
- s1 + APoint3DSerializerDeserializer.getCoordinateOffset(Coordinate.X) - 1),
- ADoubleSerializerDeserializer.getDouble(b2,
- s2 + APoint3DSerializerDeserializer.getCoordinateOffset(Coordinate.X) - 1));
+ int c = Double
+ .compare(
+ ADoubleSerializerDeserializer
+ .getDouble(b1,
+ s1 + APoint3DSerializerDeserializer
+ .getCoordinateOffset(Coordinate.X) - 1),
+ ADoubleSerializerDeserializer.getDouble(b2,
+ s2 + APoint3DSerializerDeserializer.getCoordinateOffset(Coordinate.X) - 1));
if (c == 0) {
- c = Double.compare(
- ADoubleSerializerDeserializer.getDouble(b1,
- s1 + APoint3DSerializerDeserializer.getCoordinateOffset(Coordinate.Y) - 1),
- ADoubleSerializerDeserializer.getDouble(b2,
- s2 + APoint3DSerializerDeserializer.getCoordinateOffset(Coordinate.Y) - 1));
+ c = Double
+ .compare(
+ ADoubleSerializerDeserializer.getDouble(
+ b1, s1 + APoint3DSerializerDeserializer
+ .getCoordinateOffset(Coordinate.Y) - 1),
+ ADoubleSerializerDeserializer.getDouble(b2,
+ s2 + APoint3DSerializerDeserializer.getCoordinateOffset(Coordinate.Y)
+ - 1));
if (c == 0) {
return Double.compare(
- ADoubleSerializerDeserializer.getDouble(b1,
- s1 + APoint3DSerializerDeserializer.getCoordinateOffset(Coordinate.Z) - 1),
+ ADoubleSerializerDeserializer
+ .getDouble(b1,
+ s1 + APoint3DSerializerDeserializer
+ .getCoordinateOffset(Coordinate.Z) - 1),
ADoubleSerializerDeserializer.getDouble(b2,
s2 + APoint3DSerializerDeserializer.getCoordinateOffset(Coordinate.Z) - 1));
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/APointPartialBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/APointPartialBinaryComparatorFactory.java
index a6df87c..b0eeb2b 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/APointPartialBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/APointPartialBinaryComparatorFactory.java
@@ -48,11 +48,13 @@
ADoubleSerializerDeserializer.getDouble(b2,
s2 + APointSerializerDeserializer.getCoordinateOffset(Coordinate.X) - 1));
if (c == 0) {
- return Double.compare(
- ADoubleSerializerDeserializer.getDouble(b1,
- s1 + APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y) - 1),
- ADoubleSerializerDeserializer.getDouble(b2,
- s2 + APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y) - 1));
+ return Double
+ .compare(
+ ADoubleSerializerDeserializer.getDouble(
+ b1, s1 + APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y)
+ - 1),
+ ADoubleSerializerDeserializer.getDouble(b2, s2
+ + APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y) - 1));
}
return c;
} catch (HyracksDataException hex) {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/APolygonPartialBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/APolygonPartialBinaryComparatorFactory.java
index 407fa23..577f6a3 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/APolygonPartialBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/APolygonPartialBinaryComparatorFactory.java
@@ -48,27 +48,31 @@
try {
short pointCount1 = AInt16SerializerDeserializer.getShort(b1,
s1 + APolygonSerializerDeserializer.getNumberOfPointsOffset() - 1);
- int c = Short.compare(
- pointCount1,
- AInt16SerializerDeserializer.getShort(b2,
- s2 + APolygonSerializerDeserializer.getNumberOfPointsOffset() - 1));
+ int c = Short.compare(pointCount1, AInt16SerializerDeserializer.getShort(b2,
+ s2 + APolygonSerializerDeserializer.getNumberOfPointsOffset() - 1));
if (c == 0) {
int ci = 0;
for (int i = 0; i < pointCount1; i++) {
- ci = Double.compare(
- DoublePointable.getDouble(b1, s1 + APolygonSerializerDeserializer.getCoordinateOffset(i, Coordinate.X)
- - 1),
- DoublePointable.getDouble(b2, s1 + APolygonSerializerDeserializer.getCoordinateOffset(i, Coordinate.X)
- - 1));
+ ci = Double
+ .compare(
+ DoublePointable.getDouble(b1,
+ s1 + APolygonSerializerDeserializer.getCoordinateOffset(i,
+ Coordinate.X) - 1),
+ DoublePointable.getDouble(b2, s1 + APolygonSerializerDeserializer
+ .getCoordinateOffset(i, Coordinate.X) - 1));
if (ci == 0) {
- ci = Double.compare(
- DoublePointable.getDouble(b1, s1
- + APolygonSerializerDeserializer.getCoordinateOffset(i,
- Coordinate.Y) - 1),
- DoublePointable.getDouble(b2, s1
- + APolygonSerializerDeserializer.getCoordinateOffset(i,
- Coordinate.Y) - 1));
+ ci = Double
+ .compare(
+ DoublePointable.getDouble(b1,
+ s1 + APolygonSerializerDeserializer.getCoordinateOffset(i,
+ Coordinate.Y) - 1),
+ DoublePointable
+ .getDouble(
+ b2, s1
+ + APolygonSerializerDeserializer
+ .getCoordinateOffset(i, Coordinate.Y)
+ - 1));
if (ci == 0) {
continue;
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ARectanglePartialBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ARectanglePartialBinaryComparatorFactory.java
index 4bd7c78..12f358f 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ARectanglePartialBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ARectanglePartialBinaryComparatorFactory.java
@@ -25,7 +25,8 @@
public class ARectanglePartialBinaryComparatorFactory implements IBinaryComparatorFactory {
private static final long serialVersionUID = 1L;
- public final static ARectanglePartialBinaryComparatorFactory INSTANCE = new ARectanglePartialBinaryComparatorFactory();
+ public final static ARectanglePartialBinaryComparatorFactory INSTANCE =
+ new ARectanglePartialBinaryComparatorFactory();
private ARectanglePartialBinaryComparatorFactory() {
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AUUIDPartialBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AUUIDPartialBinaryComparatorFactory.java
index 67233e0..0810341 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AUUIDPartialBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/AUUIDPartialBinaryComparatorFactory.java
@@ -35,11 +35,9 @@
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
- int msbCompare = Long.compare(LongPointable.getLong(b1, s1),
- LongPointable.getLong(b2, s2));
+ int msbCompare = Long.compare(LongPointable.getLong(b1, s1), LongPointable.getLong(b2, s2));
if (msbCompare == 0) {
- return Long.compare(LongPointable.getLong(b1, s1 + 8),
- LongPointable.getLong(b2, s2 + 8));
+ return Long.compare(LongPointable.getLong(b1, s1 + 8), LongPointable.getLong(b2, s2 + 8));
} else {
return msbCompare;
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ListItemBinaryComparatorFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ListItemBinaryComparatorFactory.java
index 09db0ba..cff39a70 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ListItemBinaryComparatorFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/comparators/ListItemBinaryComparatorFactory.java
@@ -50,35 +50,35 @@
final boolean ignoreCase) {
return new IBinaryComparator() {
final IBinaryComparator ascBoolComp = BooleanBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- final IBinaryComparator ascIntComp = new PointableBinaryComparatorFactory(IntegerPointable.FACTORY)
- .createBinaryComparator();
+ final IBinaryComparator ascIntComp =
+ new PointableBinaryComparatorFactory(IntegerPointable.FACTORY).createBinaryComparator();
final IBinaryComparator ascLongComp = LongBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- final IBinaryComparator ascStrComp = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY)
- .createBinaryComparator();
- final IBinaryComparator ascLowerCaseStrComp = new PointableBinaryComparatorFactory(
- UTF8StringLowercasePointable.FACTORY).createBinaryComparator();
- final IBinaryComparator ascFloatComp = new PointableBinaryComparatorFactory(FloatPointable.FACTORY)
- .createBinaryComparator();
- final IBinaryComparator ascDoubleComp = new PointableBinaryComparatorFactory(DoublePointable.FACTORY)
- .createBinaryComparator();
- final IBinaryComparator ascRectangleComp = ARectanglePartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- final IBinaryComparator ascCircleComp = ACirclePartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- final IBinaryComparator ascDurationComp = ADurationPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- final IBinaryComparator ascIntervalComp = AIntervalAscPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ final IBinaryComparator ascStrComp =
+ new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY).createBinaryComparator();
+ final IBinaryComparator ascLowerCaseStrComp =
+ new PointableBinaryComparatorFactory(UTF8StringLowercasePointable.FACTORY).createBinaryComparator();
+ final IBinaryComparator ascFloatComp =
+ new PointableBinaryComparatorFactory(FloatPointable.FACTORY).createBinaryComparator();
+ final IBinaryComparator ascDoubleComp =
+ new PointableBinaryComparatorFactory(DoublePointable.FACTORY).createBinaryComparator();
+ final IBinaryComparator ascRectangleComp =
+ ARectanglePartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ final IBinaryComparator ascCircleComp =
+ ACirclePartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ final IBinaryComparator ascDurationComp =
+ ADurationPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ final IBinaryComparator ascIntervalComp =
+ AIntervalAscPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
final IBinaryComparator ascLineComp = ALinePartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- final IBinaryComparator ascPointComp = APointPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- final IBinaryComparator ascPoint3DComp = APoint3DPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- final IBinaryComparator ascPolygonComp = APolygonPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ final IBinaryComparator ascPointComp =
+ APointPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ final IBinaryComparator ascPoint3DComp =
+ APoint3DPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ final IBinaryComparator ascPolygonComp =
+ APolygonPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
final IBinaryComparator ascUUIDComp = AUUIDPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- final IBinaryComparator ascByteArrayComp = new PointableBinaryComparatorFactory(ByteArrayPointable.FACTORY)
- .createBinaryComparator();
+ final IBinaryComparator ascByteArrayComp =
+ new PointableBinaryComparatorFactory(ByteArrayPointable.FACTORY).createBinaryComparator();
final IBinaryComparator rawComp = RawBinaryComparatorFactory.INSTANCE.createBinaryComparator();
@Override
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/hash/AObjectBinaryHashFunctionFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/hash/AObjectBinaryHashFunctionFactory.java
index e02ac3d..a8fa8a4 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/hash/AObjectBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/hash/AObjectBinaryHashFunctionFactory.java
@@ -35,8 +35,8 @@
@Override
public IBinaryHashFunction createBinaryHashFunction() {
return new IBinaryHashFunction() {
- private IBinaryHashFunction genericBinaryHash = MurmurHash3BinaryHashFunctionFamily.INSTANCE
- .createBinaryHashFunction(0);
+ private IBinaryHashFunction genericBinaryHash =
+ MurmurHash3BinaryHashFunctionFamily.INSTANCE.createBinaryHashFunction(0);
@Override
public int hash(byte[] bytes, int offset, int length) throws HyracksDataException {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/hash/ListItemBinaryHashFunctionFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/hash/ListItemBinaryHashFunctionFactory.java
index d7cf9ae..a3a826e 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/hash/ListItemBinaryHashFunctionFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/hash/ListItemBinaryHashFunctionFactory.java
@@ -53,10 +53,11 @@
public IBinaryHashFunction createBinaryHashFunction(final ATypeTag itemTypeTag, final boolean ignoreCase) {
return new IBinaryHashFunction() {
- private IBinaryHashFunction lowerCaseStringHash = new PointableBinaryHashFunctionFactory(
- UTF8StringLowercasePointable.FACTORY).createBinaryHashFunction();
- private IBinaryHashFunction genericBinaryHash = MurmurHash3BinaryHashFunctionFamily.INSTANCE
- .createBinaryHashFunction(0);
+ private IBinaryHashFunction lowerCaseStringHash =
+ new PointableBinaryHashFunctionFactory(UTF8StringLowercasePointable.FACTORY)
+ .createBinaryHashFunction();
+ private IBinaryHashFunction genericBinaryHash =
+ MurmurHash3BinaryHashFunctionFamily.INSTANCE.createBinaryHashFunction(0);
private GrowableArray taggedBytes = new GrowableArray();
@Override
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/ADoublePrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/ADoublePrinterFactory.java
index 49455d5..051d00b 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/ADoublePrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/ADoublePrinterFactory.java
@@ -29,8 +29,7 @@
private static final long serialVersionUID = 1L;
public static final ADoublePrinterFactory INSTANCE = new ADoublePrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> PrintTools.printDouble(b, s,
- ps);
+ public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> PrintTools.printDouble(b, s, ps);
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AObjectPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AObjectPrinterFactory.java
index 9e81b19..21880dd 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AObjectPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AObjectPrinterFactory.java
@@ -123,12 +123,12 @@
@Override
public IPrinter createPrinter() {
- final ARecordVisitablePointable rPointable = new ARecordVisitablePointable(
- DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
- final AListVisitablePointable olPointable = new AListVisitablePointable(
- DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE);
- final AListVisitablePointable ulPointable = new AListVisitablePointable(
- DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE);
+ final ARecordVisitablePointable rPointable =
+ new ARecordVisitablePointable(DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
+ final AListVisitablePointable olPointable =
+ new AListVisitablePointable(DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE);
+ final AListVisitablePointable ulPointable =
+ new AListVisitablePointable(DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE);
final Pair<PrintStream, ATypeTag> streamTag = new Pair<>(null, null);
final IPrintVisitor visitor = new APrintVisitor();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AOptionalFieldPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AOptionalFieldPrinterFactory.java
index 8797bdc..f44b834 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AOptionalFieldPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AOptionalFieldPrinterFactory.java
@@ -46,8 +46,7 @@
@Override
public void init() throws HyracksDataException {
- nullPrinter = (ADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(BuiltinType.ANULL))
- .createPrinter();
+ nullPrinter = (ADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(BuiltinType.ANULL)).createPrinter();
fieldPrinter = (ADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(unionType.getActualType()))
.createPrinter();
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AOrderedlistPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AOrderedlistPrinterFactory.java
index 0eef522..322ce7c 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AOrderedlistPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AOrderedlistPrinterFactory.java
@@ -44,8 +44,8 @@
@Override
public IPrinter createPrinter() {
final PointableAllocator allocator = new PointableAllocator();
- final IAType inputType = orderedlistType == null
- ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.ARRAY) : orderedlistType;
+ final IAType inputType = orderedlistType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.ARRAY)
+ : orderedlistType;
final IVisitablePointable listAccessor = allocator.allocateListValue(inputType);
final APrintVisitor printVisitor = new APrintVisitor();
final Pair<PrintStream, ATypeTag> arg = new Pair<>(null, null);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/ARecordPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/ARecordPrinterFactory.java
index 0ea2d96..718a49c 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/ARecordPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/ARecordPrinterFactory.java
@@ -44,8 +44,8 @@
@Override
public IPrinter createPrinter() {
final PointableAllocator allocator = new PointableAllocator();
- final IAType inputType = recType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.OBJECT)
- : recType;
+ final IAType inputType =
+ recType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.OBJECT) : recType;
final IVisitablePointable recAccessor = allocator.allocateRecordValue(inputType);
final APrintVisitor printVisitor = new APrintVisitor();
final Pair<PrintStream, ATypeTag> arg = new Pair<>(null, null);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AUnionPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AUnionPrinterFactory.java
index d8a412f..105bef0 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AUnionPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/adm/AUnionPrinterFactory.java
@@ -49,8 +49,9 @@
unionList = unionType.getUnionList();
printers = new IPrinter[unionType.getUnionList().size()];
for (int i = 0; i < printers.length; i++) {
- printers[i] = (ADMPrinterFactoryProvider.INSTANCE
- .getPrinterFactory(unionType.getUnionList().get(i))).createPrinter();
+ printers[i] =
+ (ADMPrinterFactoryProvider.INSTANCE.getPrinterFactory(unionType.getUnionList().get(i)))
+ .createPrinter();
printers[i].init();
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ADoublePrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ADoublePrinterFactory.java
index 29f11c4..79d20cb 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ADoublePrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ADoublePrinterFactory.java
@@ -29,8 +29,7 @@
private static final long serialVersionUID = 1L;
public static final ADoublePrinterFactory INSTANCE = new ADoublePrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> PrintTools.printDouble(b, s,
- ps);
+ public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> PrintTools.printDouble(b, s, ps);
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt16PrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt16PrinterFactory.java
index 8c6ec22..5de8aea 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt16PrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt16PrinterFactory.java
@@ -29,8 +29,8 @@
private static final long serialVersionUID = 1L;
public static final AInt16PrinterFactory INSTANCE = new AInt16PrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> ps
- .print(AInt16SerializerDeserializer.getShort(b, s + 1));
+ public static final IPrinter PRINTER =
+ (byte[] b, int s, int l, PrintStream ps) -> ps.print(AInt16SerializerDeserializer.getShort(b, s + 1));
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt32PrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt32PrinterFactory.java
index ef3f9d9..c260b55 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt32PrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt32PrinterFactory.java
@@ -29,8 +29,8 @@
private static final long serialVersionUID = 1L;
public static final AInt32PrinterFactory INSTANCE = new AInt32PrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> ps
- .print(AInt32SerializerDeserializer.getInt(b, s + 1));
+ public static final IPrinter PRINTER =
+ (byte[] b, int s, int l, PrintStream ps) -> ps.print(AInt32SerializerDeserializer.getInt(b, s + 1));
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt64PrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt64PrinterFactory.java
index 403d0f4..4a8dd8a 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt64PrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt64PrinterFactory.java
@@ -29,8 +29,8 @@
private static final long serialVersionUID = 1L;
public static final AInt64PrinterFactory INSTANCE = new AInt64PrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> ps
- .print(AInt64SerializerDeserializer.getLong(b, s + 1));
+ public static final IPrinter PRINTER =
+ (byte[] b, int s, int l, PrintStream ps) -> ps.print(AInt64SerializerDeserializer.getLong(b, s + 1));
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt8PrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt8PrinterFactory.java
index c9d4165..8de0264 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt8PrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AInt8PrinterFactory.java
@@ -29,8 +29,8 @@
private static final long serialVersionUID = 1L;
public static final AInt8PrinterFactory INSTANCE = new AInt8PrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> ps
- .print(AInt8SerializerDeserializer.getByte(b, s + 1));
+ public static final IPrinter PRINTER =
+ (byte[] b, int s, int l, PrintStream ps) -> ps.print(AInt8SerializerDeserializer.getByte(b, s + 1));
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AObjectPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AObjectPrinterFactory.java
index 2fddc83..f1e2300 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AObjectPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AObjectPrinterFactory.java
@@ -119,8 +119,8 @@
@Override
public IPrinter createPrinter() {
- final ARecordVisitablePointable rPointable = new ARecordVisitablePointable(
- DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
+ final ARecordVisitablePointable rPointable =
+ new ARecordVisitablePointable(DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
final Pair<PrintStream, ATypeTag> streamTag = new Pair<>(null, null);
final IPrintVisitor visitor = new APrintVisitor();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AOptionalFieldPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AOptionalFieldPrinterFactory.java
index ca01935..ef212f4 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AOptionalFieldPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AOptionalFieldPrinterFactory.java
@@ -46,8 +46,8 @@
@Override
public void init() throws HyracksDataException {
- nullPrinter = (CSVPrinterFactoryProvider.INSTANCE.getPrinterFactory(BuiltinType.AMISSING))
- .createPrinter();
+ nullPrinter =
+ (CSVPrinterFactoryProvider.INSTANCE.getPrinterFactory(BuiltinType.AMISSING)).createPrinter();
fieldPrinter = (CSVPrinterFactoryProvider.INSTANCE.getPrinterFactory(unionType.getActualType()))
.createPrinter();
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ARecordPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ARecordPrinterFactory.java
index bf55410..909fd60 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ARecordPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/ARecordPrinterFactory.java
@@ -44,8 +44,8 @@
@Override
public IPrinter createPrinter() {
final PointableAllocator allocator = new PointableAllocator();
- final IAType inputType = recType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.OBJECT)
- : recType;
+ final IAType inputType =
+ recType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.OBJECT) : recType;
final IVisitablePointable recAccessor = allocator.allocateRecordValue(inputType);
final APrintVisitor printVisitor = new APrintVisitor();
final Pair<PrintStream, ATypeTag> arg = new Pair<>(null, null);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AUnionPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AUnionPrinterFactory.java
index 62cbf1a..28e0321 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AUnionPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/csv/AUnionPrinterFactory.java
@@ -51,8 +51,9 @@
unionList = unionType.getUnionList();
printers = new IPrinter[unionType.getUnionList().size()];
for (int i = 0; i < printers.length; i++) {
- printers[i] = (CSVPrinterFactoryProvider.INSTANCE
- .getPrinterFactory(unionType.getUnionList().get(i))).createPrinter();
+ printers[i] =
+ (CSVPrinterFactoryProvider.INSTANCE.getPrinterFactory(unionType.getUnionList().get(i)))
+ .createPrinter();
printers[i].init();
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/ADoublePrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/ADoublePrinterFactory.java
index 2081162..2b4d3f7 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/ADoublePrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/ADoublePrinterFactory.java
@@ -29,8 +29,8 @@
private static final long serialVersionUID = 1L;
public static final ADoublePrinterFactory INSTANCE = new ADoublePrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> PrintTools.printDoubleForJson(b,
- s, ps);
+ public static final IPrinter PRINTER =
+ (byte[] b, int s, int l, PrintStream ps) -> PrintTools.printDoubleForJson(b, s, ps);
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AFloatPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AFloatPrinterFactory.java
index 46ff2f7..31b1f97 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AFloatPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AFloatPrinterFactory.java
@@ -29,8 +29,8 @@
private static final long serialVersionUID = 1L;
public static final AFloatPrinterFactory INSTANCE = new AFloatPrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> PrintTools.printFloatForJson(b,
- s, ps);
+ public static final IPrinter PRINTER =
+ (byte[] b, int s, int l, PrintStream ps) -> PrintTools.printFloatForJson(b, s, ps);
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt16PrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt16PrinterFactory.java
index 4454aa6..6a6ae38 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt16PrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt16PrinterFactory.java
@@ -29,8 +29,8 @@
private static final long serialVersionUID = 1L;
public static final AInt16PrinterFactory INSTANCE = new AInt16PrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> ps
- .print(AInt16SerializerDeserializer.getShort(b, s + 1));
+ public static final IPrinter PRINTER =
+ (byte[] b, int s, int l, PrintStream ps) -> ps.print(AInt16SerializerDeserializer.getShort(b, s + 1));
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt32PrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt32PrinterFactory.java
index 40bf33d..fc6943d 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt32PrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt32PrinterFactory.java
@@ -29,8 +29,8 @@
private static final long serialVersionUID = 1L;
public static final AInt32PrinterFactory INSTANCE = new AInt32PrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> ps
- .print(AInt32SerializerDeserializer.getInt(b, s + 1));
+ public static final IPrinter PRINTER =
+ (byte[] b, int s, int l, PrintStream ps) -> ps.print(AInt32SerializerDeserializer.getInt(b, s + 1));
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt64PrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt64PrinterFactory.java
index 5fe8e3a..8aa5de1 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt64PrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt64PrinterFactory.java
@@ -29,8 +29,8 @@
private static final long serialVersionUID = 1L;
public static final AInt64PrinterFactory INSTANCE = new AInt64PrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> ps
- .print(AInt64SerializerDeserializer.getLong(b, s + 1));
+ public static final IPrinter PRINTER =
+ (byte[] b, int s, int l, PrintStream ps) -> ps.print(AInt64SerializerDeserializer.getLong(b, s + 1));
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt8PrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt8PrinterFactory.java
index a6c3c97..3552b96 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt8PrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AInt8PrinterFactory.java
@@ -29,8 +29,8 @@
private static final long serialVersionUID = 1L;
public static final AInt8PrinterFactory INSTANCE = new AInt8PrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> ps
- .print(AInt8SerializerDeserializer.getByte(b, s + 1));
+ public static final IPrinter PRINTER =
+ (byte[] b, int s, int l, PrintStream ps) -> ps.print(AInt8SerializerDeserializer.getByte(b, s + 1));
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AObjectPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AObjectPrinterFactory.java
index 624613a..5cae68c 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AObjectPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AObjectPrinterFactory.java
@@ -120,12 +120,12 @@
@Override
public IPrinter createPrinter() {
- final ARecordVisitablePointable rPointable = new ARecordVisitablePointable(
- DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
- final AListVisitablePointable olPointable = new AListVisitablePointable(
- DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE);
- final AListVisitablePointable ulPointable = new AListVisitablePointable(
- DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE);
+ final ARecordVisitablePointable rPointable =
+ new ARecordVisitablePointable(DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
+ final AListVisitablePointable olPointable =
+ new AListVisitablePointable(DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE);
+ final AListVisitablePointable ulPointable =
+ new AListVisitablePointable(DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE);
final Pair<PrintStream, ATypeTag> streamTag = new Pair<>(null, null);
final IPrintVisitor visitor = new APrintVisitor();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AOptionalFieldPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AOptionalFieldPrinterFactory.java
index c933c05..188773b 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AOptionalFieldPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AOptionalFieldPrinterFactory.java
@@ -47,8 +47,8 @@
public void init() throws HyracksDataException {
nullPrinter = (CleanJSONPrinterFactoryProvider.INSTANCE.getPrinterFactory(BuiltinType.AMISSING))
.createPrinter();
- fieldPrinter = (CleanJSONPrinterFactoryProvider.INSTANCE
- .getPrinterFactory(unionType.getActualType())).createPrinter();
+ fieldPrinter = (CleanJSONPrinterFactoryProvider.INSTANCE.getPrinterFactory(unionType.getActualType()))
+ .createPrinter();
}
@Override
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AOrderedlistPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AOrderedlistPrinterFactory.java
index a6e3347..36e13fb 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AOrderedlistPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/AOrderedlistPrinterFactory.java
@@ -44,8 +44,8 @@
@Override
public IPrinter createPrinter() {
final PointableAllocator allocator = new PointableAllocator();
- final IAType inputType = orderedlistType == null
- ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.ARRAY) : orderedlistType;
+ final IAType inputType = orderedlistType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.ARRAY)
+ : orderedlistType;
final IVisitablePointable listAccessor = allocator.allocateListValue(inputType);
final APrintVisitor printVisitor = new APrintVisitor();
final Pair<PrintStream, ATypeTag> arg = new Pair<>(null, null);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/ARecordPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/ARecordPrinterFactory.java
index 430bf66..a188952 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/ARecordPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/clean/ARecordPrinterFactory.java
@@ -44,8 +44,8 @@
@Override
public IPrinter createPrinter() {
final PointableAllocator allocator = new PointableAllocator();
- final IAType inputType = recType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.OBJECT)
- : recType;
+ final IAType inputType =
+ recType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.OBJECT) : recType;
final IVisitablePointable recAccessor = allocator.allocateRecordValue(inputType);
final APrintVisitor printVisitor = new APrintVisitor();
final Pair<PrintStream, ATypeTag> arg = new Pair<>(null, null);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/ADoublePrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/ADoublePrinterFactory.java
index 5d74980..61444cd 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/ADoublePrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/ADoublePrinterFactory.java
@@ -29,8 +29,8 @@
private static final long serialVersionUID = 1L;
public static final ADoublePrinterFactory INSTANCE = new ADoublePrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> PrintTools.printDoubleForJson(b,
- s, ps);
+ public static final IPrinter PRINTER =
+ (byte[] b, int s, int l, PrintStream ps) -> PrintTools.printDoubleForJson(b, s, ps);
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AFloatPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AFloatPrinterFactory.java
index 6ed3f98..13c23a5 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AFloatPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AFloatPrinterFactory.java
@@ -29,8 +29,8 @@
private static final long serialVersionUID = 1L;
public static final AFloatPrinterFactory INSTANCE = new AFloatPrinterFactory();
- public static final IPrinter PRINTER = (byte[] b, int s, int l, PrintStream ps) -> PrintTools.printFloatForJson(b,
- s, ps);
+ public static final IPrinter PRINTER =
+ (byte[] b, int s, int l, PrintStream ps) -> PrintTools.printFloatForJson(b, s, ps);
@Override
public IPrinter createPrinter() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AObjectPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AObjectPrinterFactory.java
index 947229c..3c1c449 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AObjectPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AObjectPrinterFactory.java
@@ -120,12 +120,12 @@
@Override
public IPrinter createPrinter() {
- final ARecordVisitablePointable rPointable = new ARecordVisitablePointable(
- DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
- final AListVisitablePointable olPointable = new AListVisitablePointable(
- DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE);
- final AListVisitablePointable ulPointable = new AListVisitablePointable(
- DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE);
+ final ARecordVisitablePointable rPointable =
+ new ARecordVisitablePointable(DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE);
+ final AListVisitablePointable olPointable =
+ new AListVisitablePointable(DefaultOpenFieldType.NESTED_OPEN_AORDERED_LIST_TYPE);
+ final AListVisitablePointable ulPointable =
+ new AListVisitablePointable(DefaultOpenFieldType.NESTED_OPEN_AUNORDERED_LIST_TYPE);
final Pair<PrintStream, ATypeTag> streamTag = new Pair<>(null, null);
final IPrintVisitor visitor = new APrintVisitor();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AOptionalFieldPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AOptionalFieldPrinterFactory.java
index a0ac3cd..62d0591 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AOptionalFieldPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AOptionalFieldPrinterFactory.java
@@ -47,8 +47,9 @@
public void init() throws HyracksDataException {
nullPrinter = (LosslessJSONPrinterFactoryProvider.INSTANCE.getPrinterFactory(BuiltinType.AMISSING))
.createPrinter();
- fieldPrinter = (LosslessJSONPrinterFactoryProvider.INSTANCE
- .getPrinterFactory(unionType.getActualType())).createPrinter();
+ fieldPrinter =
+ (LosslessJSONPrinterFactoryProvider.INSTANCE.getPrinterFactory(unionType.getActualType()))
+ .createPrinter();
}
@Override
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AOrderedlistPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AOrderedlistPrinterFactory.java
index 7c210b6..c5993e8 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AOrderedlistPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/AOrderedlistPrinterFactory.java
@@ -44,8 +44,8 @@
@Override
public IPrinter createPrinter() {
PointableAllocator allocator = new PointableAllocator();
- final IAType inputType = orderedlistType == null
- ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.ARRAY) : orderedlistType;
+ final IAType inputType = orderedlistType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.ARRAY)
+ : orderedlistType;
final IVisitablePointable listAccessor = allocator.allocateListValue(inputType);
final APrintVisitor printVisitor = new APrintVisitor();
final Pair<PrintStream, ATypeTag> arg = new Pair<>(null, null);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/ARecordPrinterFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/ARecordPrinterFactory.java
index 4bab638..8a3ec39 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/ARecordPrinterFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/printers/json/lossless/ARecordPrinterFactory.java
@@ -44,8 +44,8 @@
@Override
public IPrinter createPrinter() {
final PointableAllocator allocator = new PointableAllocator();
- final IAType inputType = recType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.OBJECT)
- : recType;
+ final IAType inputType =
+ recType == null ? DefaultOpenFieldType.getDefaultOpenFieldType(ATypeTag.OBJECT) : recType;
final IVisitablePointable recAccessor = allocator.allocateRecordValue(inputType);
final APrintVisitor printVisitor = new APrintVisitor();
final Pair<PrintStream, ATypeTag> arg = new Pair<>(null, null);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
index fafa397..af36c78 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/serde/ARecordSerializerDeserializer.java
@@ -197,8 +197,8 @@
@SuppressWarnings({ "unchecked", "rawtypes" })
public static void serializeSchemalessRecord(ARecord record, DataOutput dataOutput, boolean writeTypeTag)
throws HyracksDataException {
- ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
+ ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
RecordBuilder confRecordBuilder = new RecordBuilder();
confRecordBuilder.reset(RecordUtil.FULLY_OPEN_RECORD_TYPE);
ArrayBackedValueStorage fieldNameBytes = new ArrayBackedValueStorage();
@@ -219,8 +219,8 @@
@SuppressWarnings("unchecked")
public static void serializeSimpleSchemalessRecord(List<Pair<String, String>> record, DataOutput dataOutput,
boolean writeTypeTag) throws HyracksDataException {
- ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
+ ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
RecordBuilder confRecordBuilder = new RecordBuilder();
confRecordBuilder.reset(RecordUtil.FULLY_OPEN_RECORD_TYPE);
ArrayBackedValueStorage fieldNameBytes = new ArrayBackedValueStorage();
@@ -317,11 +317,11 @@
int fieldUtflength = UTF8StringUtil.getUTFLength(fieldName, nstart + 1);
int fieldUtfMetaLen = UTF8StringUtil.getNumBytesToStoreLength(fieldUtflength);
- IBinaryHashFunction utf8HashFunction = BinaryHashFunctionFactoryProvider.UTF8STRING_POINTABLE_INSTANCE
- .createBinaryHashFunction();
+ IBinaryHashFunction utf8HashFunction =
+ BinaryHashFunctionFactoryProvider.UTF8STRING_POINTABLE_INSTANCE.createBinaryHashFunction();
- IBinaryComparator utf8BinaryComparator = BinaryComparatorFactoryProvider.UTF8STRING_POINTABLE_INSTANCE
- .createBinaryComparator();
+ IBinaryComparator utf8BinaryComparator =
+ BinaryComparatorFactoryProvider.UTF8STRING_POINTABLE_INSTANCE.createBinaryComparator();
int fieldNameHashCode = utf8HashFunction.hash(fieldName, nstart + 1, fieldUtflength + fieldUtfMetaLen);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/valueproviders/PrimitiveValueProviderFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/valueproviders/PrimitiveValueProviderFactory.java
index 1a7810e..abc235d 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/valueproviders/PrimitiveValueProviderFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/dataflow/data/nontagged/valueproviders/PrimitiveValueProviderFactory.java
@@ -39,12 +39,12 @@
@Override
public IPrimitiveValueProvider createPrimitiveValueProvider() {
return new IPrimitiveValueProvider() {
- final IPrimitiveValueProvider intProvider = IntegerPrimitiveValueProviderFactory.INSTANCE
- .createPrimitiveValueProvider();
- final IPrimitiveValueProvider floatProvider = FloatPrimitiveValueProviderFactory.INSTANCE
- .createPrimitiveValueProvider();
- final IPrimitiveValueProvider doubleProvider = DoublePrimitiveValueProviderFactory.INSTANCE
- .createPrimitiveValueProvider();
+ final IPrimitiveValueProvider intProvider =
+ IntegerPrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
+ final IPrimitiveValueProvider floatProvider =
+ FloatPrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
+ final IPrimitiveValueProvider doubleProvider =
+ DoublePrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
@Override
public double getValue(byte[] bytes, int offset) {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryTokenizerFactoryProvider.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryTokenizerFactoryProvider.java
index b5fd171..5ef7702 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryTokenizerFactoryProvider.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/BinaryTokenizerFactoryProvider.java
@@ -36,7 +36,7 @@
private static final IBinaryTokenizerFactory aqlStringTokenizer =
new DelimitedUTF8StringBinaryTokenizerFactory(true, true,
- new UTF8WordTokenFactory(ATypeTag.SERIALIZED_STRING_TYPE_TAG, ATypeTag.SERIALIZED_INT32_TYPE_TAG));
+ new UTF8WordTokenFactory(ATypeTag.SERIALIZED_STRING_TYPE_TAG, ATypeTag.SERIALIZED_INT32_TYPE_TAG));
private static final IBinaryTokenizerFactory aqlStringNoTypeTagTokenizer =
new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
@@ -46,11 +46,11 @@
new DelimitedUTF8StringBinaryTokenizerFactory(true, true, new HashedUTF8WordTokenFactory(
ATypeTag.SERIALIZED_INT32_TYPE_TAG, ATypeTag.SERIALIZED_INT32_TYPE_TAG));
- private static final IBinaryTokenizerFactory orderedListTokenizer = new AOrderedListBinaryTokenizerFactory(
- new AListElementTokenFactory());
+ private static final IBinaryTokenizerFactory orderedListTokenizer =
+ new AOrderedListBinaryTokenizerFactory(new AListElementTokenFactory());
- private static final IBinaryTokenizerFactory unorderedListTokenizer = new AUnorderedListBinaryTokenizerFactory(
- new AListElementTokenFactory());
+ private static final IBinaryTokenizerFactory unorderedListTokenizer =
+ new AUnorderedListBinaryTokenizerFactory(new AListElementTokenFactory());
@Override
public IBinaryTokenizerFactory getWordTokenizerFactory(ATypeTag typeTag, boolean hashedTokens,
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/LinearizeComparatorFactoryProvider.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/LinearizeComparatorFactoryProvider.java
index 3e54e99..91fb96a 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/LinearizeComparatorFactoryProvider.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/formats/nontagged/LinearizeComparatorFactoryProvider.java
@@ -52,8 +52,8 @@
|| typeTag == ATypeTag.BIGINT) {
return addOffset(new ZCurveIntComparatorFactory(dimension), ascending);
} else {
- throw new AlgebricksException("Cannot propose linearizer for key with type " + typeTag + " and dimension "
- + dimension + ".");
+ throw new AlgebricksException(
+ "Cannot propose linearizer for key with type " + typeTag + " and dimension " + dimension + ".");
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ABinary.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ABinary.java
index 4bcd4c9..ff32903 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ABinary.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ABinary.java
@@ -105,7 +105,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ABoolean.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ABoolean.java
index a0c572c..5589205 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ABoolean.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ABoolean.java
@@ -74,7 +74,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
return new ObjectMapper().createObjectNode().put("ABoolean", bVal);
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ACircle.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ACircle.java
index 9c2369d..93f8663 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ACircle.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ACircle.java
@@ -75,7 +75,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADate.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADate.java
index 3d7639e..2a80f28 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADate.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADate.java
@@ -89,7 +89,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADateTime.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADateTime.java
index cf6fff0..efcb828 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADateTime.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADateTime.java
@@ -130,7 +130,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADayTimeDuration.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADayTimeDuration.java
index 95e677b..6017d4b 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADayTimeDuration.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADayTimeDuration.java
@@ -41,7 +41,7 @@
* @see org.apache.hyracks.api.dataflow.value.JSONSerializable#toJSON()
*/
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADouble.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADouble.java
index 4c307f4..0a6c7d4 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADouble.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADouble.java
@@ -71,7 +71,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADuration.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADuration.java
index 618f130..5e22711 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADuration.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ADuration.java
@@ -110,7 +110,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AFloat.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AFloat.java
index 4ad1b50..a44034a 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AFloat.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AFloat.java
@@ -71,7 +71,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AGeneratedUUID.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AGeneratedUUID.java
index da1b35b..f0071b6 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AGeneratedUUID.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AGeneratedUUID.java
@@ -26,12 +26,13 @@
public class AGeneratedUUID extends AUUID {
private static class Holder {
- static final byte [] hostUnique = new byte[4];
+ static final byte[] hostUnique = new byte[4];
static {
new SecureRandom().nextBytes(hostUnique);
- }
+ }
}
+
static final Random random = new Random();
static final AtomicInteger nextInstance = new AtomicInteger(random.nextInt());
@@ -45,10 +46,10 @@
System.arraycopy(Holder.hostUnique, 0, uuidBytes, 0, 4);
// overwrite the next four bytes with the thread unique value
- uuidBytes[5] = (byte)(unique >> 24);
- uuidBytes[6] = (byte)(unique >> 16);
- uuidBytes[7] = (byte)(unique >> 8);
- uuidBytes[8] = (byte)unique;
+ uuidBytes[5] = (byte) (unique >> 24);
+ uuidBytes[6] = (byte) (unique >> 16);
+ uuidBytes[7] = (byte) (unique >> 8);
+ uuidBytes[8] = (byte) unique;
}
public void nextUUID() {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt16.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt16.java
index 65c53ac..727ba45 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt16.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt16.java
@@ -61,7 +61,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt32.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt32.java
index ae96578..2da4708 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt32.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt32.java
@@ -72,7 +72,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt64.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt64.java
index ef69856..5e20f67 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt64.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt64.java
@@ -60,7 +60,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt8.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt8.java
index 9d0f4b2..53ecd54 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt8.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInt8.java
@@ -60,7 +60,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInterval.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInterval.java
index 2d54786..150575f 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInterval.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AInterval.java
@@ -64,15 +64,15 @@
return false;
} else {
AInterval t = (AInterval) o;
- return (t.intervalStart == this.intervalStart || t.intervalEnd == this.intervalEnd
- && t.typetag == this.typetag);
+ return (t.intervalStart == this.intervalStart
+ || t.intervalEnd == this.intervalEnd && t.typetag == this.typetag);
}
}
@Override
public int hashCode() {
- return (int) (((int) (this.intervalStart ^ (this.intervalStart >>> 32))) * 31 + (int) (this.intervalEnd ^ (this.intervalEnd >>> 32)))
- * 31 + (int) this.typetag;
+ return (int) (((int) (this.intervalStart ^ (this.intervalStart >>> 32))) * 31
+ + (int) (this.intervalEnd ^ (this.intervalEnd >>> 32))) * 31 + (int) this.typetag;
}
/* (non-Javadoc)
@@ -99,9 +99,8 @@
if (typetag == ATypeTag.DATE.serialize()) {
sbder.append("date: { ");
- GregorianCalendarSystem.getInstance().getExtendStringRepUntilField(
- intervalStart * ADate.CHRONON_OF_DAY, 0, sbder, GregorianCalendarSystem.Fields.YEAR,
- GregorianCalendarSystem.Fields.DAY, false);
+ GregorianCalendarSystem.getInstance().getExtendStringRepUntilField(intervalStart * ADate.CHRONON_OF_DAY,
+ 0, sbder, GregorianCalendarSystem.Fields.YEAR, GregorianCalendarSystem.Fields.DAY, false);
sbder.append(" }, date: {");
GregorianCalendarSystem.getInstance().getExtendStringRepUntilField(intervalEnd * ADate.CHRONON_OF_DAY,
@@ -145,7 +144,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
// TODO(madhusudancs): Remove this method when a printer based JSON serializer is implemented.
return null;
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ALine.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ALine.java
index 67ae2d5..66ec23e 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ALine.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ALine.java
@@ -68,7 +68,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMissing.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMissing.java
index a404032..e38dc76 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMissing.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMissing.java
@@ -52,7 +52,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableUUID.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableUUID.java
index c9e581c..2fb69ab 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableUUID.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AMutableUUID.java
@@ -27,9 +27,9 @@
if (tokenImage.length() != UUID_CHARS) {
throw new HyracksDataException("This is not a correct UUID value: " + tokenImage);
}
- byte [] hexBytes = new byte[UUID_CHARS];
+ byte[] hexBytes = new byte[UUID_CHARS];
for (int i = 0; i < tokenImage.length(); i++) {
- hexBytes[i] = (byte)tokenImage.charAt(i);
+ hexBytes[i] = (byte) tokenImage.charAt(i);
}
parseUUIDHexBytes(hexBytes, 0);
}
@@ -68,9 +68,9 @@
}
// Calculate a long value from a hex string.
- private static void decodeBytesFromHex(byte[] hexArray, int hexArrayOffset, byte[] outputArray, int outputOffset, int length)
- throws HyracksDataException {
- for (int i = hexArrayOffset; i < hexArrayOffset + length; ) {
+ private static void decodeBytesFromHex(byte[] hexArray, int hexArrayOffset, byte[] outputArray, int outputOffset,
+ int length) throws HyracksDataException {
+ for (int i = hexArrayOffset; i < hexArrayOffset + length;) {
int hi = transformHexCharToInt(hexArray[i++]);
outputArray[outputOffset++] = (byte) (hi << 4 | transformHexCharToInt(hexArray[i++]));
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ANull.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ANull.java
index 6be4832..ee7b822 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ANull.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ANull.java
@@ -52,7 +52,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
json.put("ANull", "null");
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AOrderedList.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AOrderedList.java
index 3c1a165..785c62d 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AOrderedList.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AOrderedList.java
@@ -120,7 +120,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/APoint.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/APoint.java
index 66e96f0..d2d45f9 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/APoint.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/APoint.java
@@ -68,7 +68,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
ObjectNode point = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/APoint3D.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/APoint3D.java
index 9a4a06e..81f8a6b 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/APoint3D.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/APoint3D.java
@@ -74,7 +74,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/APolygon.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/APolygon.java
index eb4f1b4..3cb0f5c 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/APolygon.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/APolygon.java
@@ -88,7 +88,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ARecord.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ARecord.java
index aed969c..d964079 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ARecord.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ARecord.java
@@ -99,7 +99,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ARectangle.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ARectangle.java
index d6da9e5..8a527c9 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ARectangle.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ARectangle.java
@@ -68,7 +68,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AString.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AString.java
index aefe5f7..e6b8f7c8 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AString.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AString.java
@@ -72,7 +72,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ATime.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ATime.java
index a14d386..5375f7e 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ATime.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/ATime.java
@@ -105,7 +105,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AUUID.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AUUID.java
index 0ed9d3f..433ff8b 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AUUID.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AUUID.java
@@ -56,7 +56,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
json.put("AUUID", toString());
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AUnorderedList.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AUnorderedList.java
index 894421c..137171d 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AUnorderedList.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AUnorderedList.java
@@ -103,7 +103,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AYearMonthDuration.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AYearMonthDuration.java
index 8321848..fd35adf 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AYearMonthDuration.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/AYearMonthDuration.java
@@ -55,7 +55,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/ADateTimeParserFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/ADateTimeParserFactory.java
index b5cac1b..9cee5869 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/ADateTimeParserFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/ADateTimeParserFactory.java
@@ -59,8 +59,8 @@
chrononTimeInMs = ADateParserFactory.parseDatePart(buffer, start, timeOffset);
- chrononTimeInMs += ATimeParserFactory.parseTimePart(buffer, start + timeOffset + 1, length - timeOffset
- - 1);
+ chrononTimeInMs +=
+ ATimeParserFactory.parseTimePart(buffer, start + timeOffset + 1, length - timeOffset - 1);
try {
out.writeLong(chrononTimeInMs);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/ADurationParserFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/ADurationParserFactory.java
index cca37e4..6e7721f 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/ADurationParserFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/ADurationParserFactory.java
@@ -35,7 +35,8 @@
private static final long serialVersionUID = 1L;
- private static final String durationErrorMessage = "Wrong Input Format for a duration/year-month-duration/day-time-duration Value";
+ private static final String durationErrorMessage =
+ "Wrong Input Format for a duration/year-month-duration/day-time-duration Value";
private static final String onlyYearMonthErrorMessage = "Only year-month fields are allowed";
private static final String onlyDayTimeErrorMessage = "Only day-time fields are allowed";
@@ -214,11 +215,11 @@
if (charAccessor.getCharAt(offset + i) >= '0'
&& charAccessor.getCharAt(offset + i) <= '9') {
if (i < 4) {
- millisecond = millisecond * DECIMAL_UNIT
- + (charAccessor.getCharAt(offset + i) - '0');
+ millisecond =
+ millisecond * DECIMAL_UNIT + (charAccessor.getCharAt(offset + i) - '0');
} else {
- throw new HyracksDataException(durationErrorMessage
- + ": wrong MILLISECOND field.");
+ throw new HyracksDataException(
+ durationErrorMessage + ": wrong MILLISECOND field.");
}
} else {
break;
@@ -253,10 +254,9 @@
}
int totalMonths = sign * (year * 12 + month);
- long totalMilliseconds = sign
- * (day * GregorianCalendarSystem.CHRONON_OF_DAY + hour * GregorianCalendarSystem.CHRONON_OF_HOUR
- + minute * GregorianCalendarSystem.CHRONON_OF_MINUTE + second
- * GregorianCalendarSystem.CHRONON_OF_SECOND + millisecond);
+ long totalMilliseconds = sign * (day * GregorianCalendarSystem.CHRONON_OF_DAY
+ + hour * GregorianCalendarSystem.CHRONON_OF_HOUR + minute * GregorianCalendarSystem.CHRONON_OF_MINUTE
+ + second * GregorianCalendarSystem.CHRONON_OF_SECOND + millisecond);
if (sign > 0) {
if (totalMonths < 0) {
@@ -264,9 +264,8 @@
+ ": total number of months is beyond its max value (-2147483647 to 2147483647).");
}
if (totalMilliseconds < 0) {
- throw new HyracksDataException(
- durationErrorMessage
- + ": total number of milliseconds is beyond its max value (-9223372036854775808 to 9223372036854775807).");
+ throw new HyracksDataException(durationErrorMessage
+ + ": total number of milliseconds is beyond its max value (-9223372036854775808 to 9223372036854775807).");
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/ATimeParserFactory.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/ATimeParserFactory.java
index 2b97005..e14720a 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/ATimeParserFactory.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/ATimeParserFactory.java
@@ -146,7 +146,8 @@
}
// error is thrown if more than three digits are seen for the millisecond part
- if (length > offset && timeString.charAt(start + offset) >= '0' && timeString.charAt(start + offset) <= '9') {
+ if (length > offset && timeString.charAt(start + offset) >= '0'
+ && timeString.charAt(start + offset) <= '9') {
throw new HyracksDataException(timeErrorMessage + ": too many fields for millisecond.");
}
}
@@ -188,16 +189,17 @@
int temp_offset = (timeString.charAt(start + 3) == ':') ? 1 : 0;
for (int i = 0; i < 2; i++) {
- if ((timeString.charAt(start + temp_offset + 3 + i) >= '0' && timeString.charAt(start + temp_offset + 3
- + i) <= '9')) {
- timezoneMinute = (short) (timezoneMinute * 10 + timeString.charAt(start + temp_offset + 3 + i) - '0');
+ if ((timeString.charAt(start + temp_offset + 3 + i) >= '0'
+ && timeString.charAt(start + temp_offset + 3 + i) <= '9')) {
+ timezoneMinute =
+ (short) (timezoneMinute * 10 + timeString.charAt(start + temp_offset + 3 + i) - '0');
} else {
throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in timezone minute field");
}
}
- timezone = (int) (timezoneHour * GregorianCalendarSystem.CHRONON_OF_HOUR + timezoneMinute
- * GregorianCalendarSystem.CHRONON_OF_MINUTE);
+ timezone = (int) (timezoneHour * GregorianCalendarSystem.CHRONON_OF_HOUR
+ + timezoneMinute * GregorianCalendarSystem.CHRONON_OF_MINUTE);
if (timeString.charAt(start) == '+') {
timezone *= -1;
@@ -343,15 +345,16 @@
int temp_offset = (timeString[start + 3] == ':') ? 1 : 0;
for (int i = 0; i < 2; i++) {
- if ((timeString[start + temp_offset + 3 + i] >= '0' && timeString[start + temp_offset + 3 + i] <= '9')) {
+ if ((timeString[start + temp_offset + 3 + i] >= '0'
+ && timeString[start + temp_offset + 3 + i] <= '9')) {
timezoneMinute = (short) (timezoneMinute * 10 + timeString[start + temp_offset + 3 + i] - '0');
} else {
throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in timezone minute field");
}
}
- timezone = (int) (timezoneHour * GregorianCalendarSystem.CHRONON_OF_HOUR + timezoneMinute
- * GregorianCalendarSystem.CHRONON_OF_MINUTE);
+ timezone = (int) (timezoneHour * GregorianCalendarSystem.CHRONON_OF_HOUR
+ + timezoneMinute * GregorianCalendarSystem.CHRONON_OF_MINUTE);
if (timeString[start] == '+') {
timezone *= -1;
@@ -497,15 +500,16 @@
int temp_offset = (timeString[start + 3] == ':') ? 1 : 0;
for (int i = 0; i < 2; i++) {
- if ((timeString[start + temp_offset + 3 + i] >= '0' && timeString[start + temp_offset + 3 + i] <= '9')) {
+ if ((timeString[start + temp_offset + 3 + i] >= '0'
+ && timeString[start + temp_offset + 3 + i] <= '9')) {
timezoneMinute = (short) (timezoneMinute * 10 + timeString[start + temp_offset + 3 + i] - '0');
} else {
throw new HyracksDataException(timeErrorMessage + ": Non-numeric value in timezone minute field");
}
}
- timezone = (int) (timezoneHour * GregorianCalendarSystem.CHRONON_OF_HOUR + timezoneMinute
- * GregorianCalendarSystem.CHRONON_OF_MINUTE);
+ timezone = (int) (timezoneHour * GregorianCalendarSystem.CHRONON_OF_HOUR
+ + timezoneMinute * GregorianCalendarSystem.CHRONON_OF_MINUTE);
if (timeString[start] == '+') {
timezone *= -1;
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/DateTimeFormatUtils.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/DateTimeFormatUtils.java
index 1051458..300d696 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/DateTimeFormatUtils.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/DateTimeFormatUtils.java
@@ -308,8 +308,8 @@
break;
case DAY_CHAR:
processState = DateTimeProcessState.DAY;
- pointerMove = parseFormatField(format, formatStart, formatLength, formatPointer, DAY_CHAR,
- MAX_DAY_CHARS);
+ pointerMove =
+ parseFormatField(format, formatStart, formatLength, formatPointer, DAY_CHAR, MAX_DAY_CHARS);
formatPointer += pointerMove;
formatCharCopies += pointerMove;
break;
@@ -762,8 +762,8 @@
break;
case DAY_CHAR:
processState = DateTimeProcessState.DAY;
- pointerMove = parseFormatField(format, formatStart, formatLength, formatPointer, DAY_CHAR,
- MAX_DAY_CHARS);
+ pointerMove =
+ parseFormatField(format, formatStart, formatLength, formatPointer, DAY_CHAR, MAX_DAY_CHARS);
formatPointer += pointerMove;
formatCharCopies += pointerMove;
break;
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/GregorianCalendarSystem.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/GregorianCalendarSystem.java
index 7f79478..dd711a8 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/GregorianCalendarSystem.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/base/temporal/GregorianCalendarSystem.java
@@ -212,8 +212,8 @@
*/
public long getChronon(int year, int month, int day, int hour, int min, int sec, int millis, int timezone) {
// Added milliseconds for all fields but month and day
- long chrononTime = chrononizeBeginningOfYear(year) + hour * CHRONON_OF_HOUR + min * CHRONON_OF_MINUTE + sec
- * CHRONON_OF_SECOND + millis + timezone;
+ long chrononTime = chrononizeBeginningOfYear(year) + hour * CHRONON_OF_HOUR + min * CHRONON_OF_MINUTE
+ + sec * CHRONON_OF_SECOND + millis + timezone;
// Added milliseconds for days of the month.
chrononTime += (day - 1 + DAYS_SINCE_MONTH_BEGIN_ORDI[month - 1]) * CHRONON_OF_DAY;
@@ -238,8 +238,8 @@
*/
public int getChronon(int hour, int min, int sec, int millis, int timezone) {
// Added milliseconds for all fields but month and day
- long chrononTime = hour * CHRONON_OF_HOUR + min * CHRONON_OF_MINUTE + sec * CHRONON_OF_SECOND + millis
- + timezone;
+ long chrononTime =
+ hour * CHRONON_OF_HOUR + min * CHRONON_OF_MINUTE + sec * CHRONON_OF_SECOND + millis + timezone;
return (int) chrononTime;
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/exceptions/InvalidExpressionException.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/exceptions/InvalidExpressionException.java
index c84b9e2..699629b 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/exceptions/InvalidExpressionException.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/exceptions/InvalidExpressionException.java
@@ -32,15 +32,13 @@
public InvalidExpressionException(FunctionIdentifier fid, int index, ILogicalExpression actualExpr,
LogicalExpressionTag... exprKinds) {
- super(ErrorCode.COMPILATION_INVALID_EXPRESSION, fid.getName(), indexToPosition(index),
- actualExpr.toString(),
+ super(ErrorCode.COMPILATION_INVALID_EXPRESSION, fid.getName(), indexToPosition(index), actualExpr.toString(),
toExpectedTypeString(exprKinds));
}
public InvalidExpressionException(String functionName, int index, ILogicalExpression actualExpr,
LogicalExpressionTag... exprKinds) {
- super(ErrorCode.COMPILATION_INVALID_EXPRESSION, functionName, indexToPosition(index),
- actualExpr.toString(),
+ super(ErrorCode.COMPILATION_INVALID_EXPRESSION, functionName, indexToPosition(index), actualExpr.toString(),
toExpectedTypeString(exprKinds));
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java
index dde61f9..d749899 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/BuiltinFunctions.java
@@ -149,395 +149,395 @@
private static final Map<IFunctionInfo, SpatialFilterKind> spatialFilterFunctions = new HashMap<>();
public static final FunctionIdentifier TYPE_OF = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "type-of", 1);
- public static final FunctionIdentifier GET_HANDLE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "get-handle", 2);
- public static final FunctionIdentifier GET_DATA = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-data",
- 2);
+ public static final FunctionIdentifier GET_HANDLE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-handle", 2);
+ public static final FunctionIdentifier GET_DATA =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-data", 2);
- public static final FunctionIdentifier GET_ITEM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-item",
- 2);
- public static final FunctionIdentifier ANY_COLLECTION_MEMBER = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "any-collection-member", 1);
+ public static final FunctionIdentifier GET_ITEM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-item", 2);
+ public static final FunctionIdentifier ANY_COLLECTION_MEMBER =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "any-collection-member", 1);
public static final FunctionIdentifier LISTIFY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "listify", 1);
public static final FunctionIdentifier LEN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "len", 1);
- public static final FunctionIdentifier CONCAT_NON_NULL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "concat-non-null", FunctionIdentifier.VARARGS);
- public static final FunctionIdentifier EMPTY_STREAM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "empty-stream", 0);
- public static final FunctionIdentifier NON_EMPTY_STREAM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "non-empty-stream", 0);
+ public static final FunctionIdentifier CONCAT_NON_NULL =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "concat-non-null", FunctionIdentifier.VARARGS);
+ public static final FunctionIdentifier EMPTY_STREAM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "empty-stream", 0);
+ public static final FunctionIdentifier NON_EMPTY_STREAM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "non-empty-stream", 0);
public static final FunctionIdentifier ORDERED_LIST_CONSTRUCTOR = new FunctionIdentifier(
FunctionConstants.ASTERIX_NS, "ordered-list-constructor", FunctionIdentifier.VARARGS);
public static final FunctionIdentifier UNORDERED_LIST_CONSTRUCTOR = new FunctionIdentifier(
FunctionConstants.ASTERIX_NS, "unordered-list-constructor", FunctionIdentifier.VARARGS);
- public static final FunctionIdentifier DEEP_EQUAL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "deep-equal", 2);
+ public static final FunctionIdentifier DEEP_EQUAL =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "deep-equal", 2);
// objects
- public static final FunctionIdentifier RECORD_MERGE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "object-merge", 2);
- public static final FunctionIdentifier REMOVE_FIELDS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "object-remove-fields", 2);
- public static final FunctionIdentifier ADD_FIELDS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "object-add-fields", 2);
+ public static final FunctionIdentifier RECORD_MERGE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "object-merge", 2);
+ public static final FunctionIdentifier REMOVE_FIELDS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "object-remove-fields", 2);
+ public static final FunctionIdentifier ADD_FIELDS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "object-add-fields", 2);
public static final FunctionIdentifier CLOSED_RECORD_CONSTRUCTOR = new FunctionIdentifier(
FunctionConstants.ASTERIX_NS, "closed-object-constructor", FunctionIdentifier.VARARGS);
- public static final FunctionIdentifier OPEN_RECORD_CONSTRUCTOR = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "open-object-constructor", FunctionIdentifier.VARARGS);
- public static final FunctionIdentifier FIELD_ACCESS_BY_INDEX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "field-access-by-index", 2);
- public static final FunctionIdentifier FIELD_ACCESS_BY_NAME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "field-access-by-name", 2);
- public static final FunctionIdentifier FIELD_ACCESS_NESTED = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "field-access-nested", 2);
- public static final FunctionIdentifier GET_RECORD_FIELDS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "get-object-fields", 1);
- public static final FunctionIdentifier GET_RECORD_FIELD_VALUE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "get-object-field-value", 2);
- public static final FunctionIdentifier RECORD_PAIRS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "object-pairs", FunctionIdentifier.VARARGS);
+ public static final FunctionIdentifier OPEN_RECORD_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "open-object-constructor", FunctionIdentifier.VARARGS);
+ public static final FunctionIdentifier FIELD_ACCESS_BY_INDEX =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "field-access-by-index", 2);
+ public static final FunctionIdentifier FIELD_ACCESS_BY_NAME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "field-access-by-name", 2);
+ public static final FunctionIdentifier FIELD_ACCESS_NESTED =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "field-access-nested", 2);
+ public static final FunctionIdentifier GET_RECORD_FIELDS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-object-fields", 1);
+ public static final FunctionIdentifier GET_RECORD_FIELD_VALUE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-object-field-value", 2);
+ public static final FunctionIdentifier RECORD_PAIRS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "object-pairs", FunctionIdentifier.VARARGS);
// numeric
- public static final FunctionIdentifier NUMERIC_UNARY_MINUS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "numeric-unary-minus", 1);
- public static final FunctionIdentifier NUMERIC_SUBTRACT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "numeric-subtract", 2);
- public static final FunctionIdentifier NUMERIC_MULTIPLY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "numeric-multiply", 2);
- public static final FunctionIdentifier NUMERIC_DIVIDE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "numeric-divide", 2);
- public static final FunctionIdentifier NUMERIC_MOD = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "numeric-mod", 2);
- public static final FunctionIdentifier NUMERIC_IDIV = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "numeric-idiv", 2);
+ public static final FunctionIdentifier NUMERIC_UNARY_MINUS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "numeric-unary-minus", 1);
+ public static final FunctionIdentifier NUMERIC_SUBTRACT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "numeric-subtract", 2);
+ public static final FunctionIdentifier NUMERIC_MULTIPLY =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "numeric-multiply", 2);
+ public static final FunctionIdentifier NUMERIC_DIVIDE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "numeric-divide", 2);
+ public static final FunctionIdentifier NUMERIC_MOD =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "numeric-mod", 2);
+ public static final FunctionIdentifier NUMERIC_IDIV =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "numeric-idiv", 2);
public static final FunctionIdentifier CARET = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "caret", 2);
public static final FunctionIdentifier NUMERIC_ABS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "abs", 1);
- public static final FunctionIdentifier NUMERIC_ACOS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "acos",
- 1);
- public static final FunctionIdentifier NUMERIC_ASIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "asin",
- 1);
- public static final FunctionIdentifier NUMERIC_ATAN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "atan",
- 1);
- public static final FunctionIdentifier NUMERIC_ATAN2 = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "atan2",
- 2);
+ public static final FunctionIdentifier NUMERIC_ACOS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "acos", 1);
+ public static final FunctionIdentifier NUMERIC_ASIN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "asin", 1);
+ public static final FunctionIdentifier NUMERIC_ATAN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "atan", 1);
+ public static final FunctionIdentifier NUMERIC_ATAN2 =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "atan2", 2);
public static final FunctionIdentifier NUMERIC_COS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "cos", 1);
public static final FunctionIdentifier NUMERIC_SIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sin", 1);
public static final FunctionIdentifier NUMERIC_TAN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "tan", 1);
public static final FunctionIdentifier NUMERIC_EXP = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "exp", 1);
public static final FunctionIdentifier NUMERIC_LN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "ln", 1);
public static final FunctionIdentifier NUMERIC_LOG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "log", 1);
- public static final FunctionIdentifier NUMERIC_SQRT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sqrt",
- 1);
- public static final FunctionIdentifier NUMERIC_SIGN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sign",
- 1);
+ public static final FunctionIdentifier NUMERIC_SQRT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sqrt", 1);
+ public static final FunctionIdentifier NUMERIC_SIGN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sign", 1);
- public static final FunctionIdentifier NUMERIC_CEILING = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "ceiling", 1);
- public static final FunctionIdentifier NUMERIC_FLOOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "floor",
- 1);
- public static final FunctionIdentifier NUMERIC_ROUND = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "round",
- 1);
- public static final FunctionIdentifier NUMERIC_ROUND_HALF_TO_EVEN = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "round-half-to-even", 1);
- public static final FunctionIdentifier NUMERIC_ROUND_HALF_TO_EVEN2 = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "round-half-to-even", 2);
- public static final FunctionIdentifier NUMERIC_TRUNC = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "trunc",
- 2);
+ public static final FunctionIdentifier NUMERIC_CEILING =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "ceiling", 1);
+ public static final FunctionIdentifier NUMERIC_FLOOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "floor", 1);
+ public static final FunctionIdentifier NUMERIC_ROUND =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "round", 1);
+ public static final FunctionIdentifier NUMERIC_ROUND_HALF_TO_EVEN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "round-half-to-even", 1);
+ public static final FunctionIdentifier NUMERIC_ROUND_HALF_TO_EVEN2 =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "round-half-to-even", 2);
+ public static final FunctionIdentifier NUMERIC_TRUNC =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "trunc", 2);
// binary functions
- public static final FunctionIdentifier BINARY_LENGTH = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "binary-length", 1);
- public static final FunctionIdentifier PARSE_BINARY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "parse-binary", 2);
- public static final FunctionIdentifier PRINT_BINARY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "print-binary", 2);
- public static final FunctionIdentifier BINARY_CONCAT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "binary-concat", 1);
- public static final FunctionIdentifier SUBBINARY_FROM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sub-binary", 2);
- public static final FunctionIdentifier SUBBINARY_FROM_TO = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sub-binary", 3);
- public static final FunctionIdentifier FIND_BINARY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "find-binary", 2);
- public static final FunctionIdentifier FIND_BINARY_FROM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "find-binary", 3);
+ public static final FunctionIdentifier BINARY_LENGTH =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "binary-length", 1);
+ public static final FunctionIdentifier PARSE_BINARY =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "parse-binary", 2);
+ public static final FunctionIdentifier PRINT_BINARY =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "print-binary", 2);
+ public static final FunctionIdentifier BINARY_CONCAT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "binary-concat", 1);
+ public static final FunctionIdentifier SUBBINARY_FROM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sub-binary", 2);
+ public static final FunctionIdentifier SUBBINARY_FROM_TO =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sub-binary", 3);
+ public static final FunctionIdentifier FIND_BINARY =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "find-binary", 2);
+ public static final FunctionIdentifier FIND_BINARY_FROM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "find-binary", 3);
 // String functions
- public static final FunctionIdentifier STRING_EQUAL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "string-equal", 2);
- public static final FunctionIdentifier STRING_MATCHES = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "matches", 2);
- public static final FunctionIdentifier STRING_MATCHES_WITH_FLAG = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "matches", 3);
- public static final FunctionIdentifier STRING_REGEXP_LIKE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "regexp-like", 2);
- public static final FunctionIdentifier STRING_REGEXP_LIKE_WITH_FLAG = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "regexp-like", 3);
- public static final FunctionIdentifier STRING_REGEXP_POSITION = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "regexp-position", 2);
- public static final FunctionIdentifier STRING_REGEXP_POSITION_WITH_FLAG = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "regexp-position", 3);
- public static final FunctionIdentifier STRING_LOWERCASE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "lowercase", 1);
- public static final FunctionIdentifier STRING_UPPERCASE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "uppercase", 1);
- public static final FunctionIdentifier STRING_INITCAP = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "initcap", 1);
- public static final FunctionIdentifier STRING_TRIM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "trim",
- 1);
- public static final FunctionIdentifier STRING_LTRIM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "ltrim",
- 1);
- public static final FunctionIdentifier STRING_RTRIM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "rtrim",
- 1);
- public static final FunctionIdentifier STRING_TRIM2 = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "trim",
- 2);
- public static final FunctionIdentifier STRING_LTRIM2 = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "ltrim", 2);
- public static final FunctionIdentifier STRING_RTRIM2 = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "rtrim", 2);
- public static final FunctionIdentifier STRING_POSITION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "position", 2);
- public static final FunctionIdentifier STRING_REPLACE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "replace", 3);
- public static final FunctionIdentifier STRING_REPLACE_WITH_FLAG = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "replace", 4);
- public static final FunctionIdentifier STRING_LENGTH = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "string-length", 1);
- public static final FunctionIdentifier STRING_LIKE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "like",
- 2);
- public static final FunctionIdentifier STRING_CONTAINS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "contains", 2);
- public static final FunctionIdentifier STRING_STARTS_WITH = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "starts-with", 2);
- public static final FunctionIdentifier STRING_ENDS_WITH = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "ends-with", 2);
- public static final FunctionIdentifier SUBSTRING = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "substring",
- 3);
- public static final FunctionIdentifier SUBSTRING2 = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "substring", 2);
- public static final FunctionIdentifier SUBSTRING_BEFORE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "substring-before", 2);
- public static final FunctionIdentifier SUBSTRING_AFTER = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "substring-after", 2);
- public static final FunctionIdentifier STRING_TO_CODEPOINT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "string-to-codepoint", 1);
- public static final FunctionIdentifier CODEPOINT_TO_STRING = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "codepoint-to-string", 1);
- public static final FunctionIdentifier STRING_CONCAT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "string-concat", 1);
- public static final FunctionIdentifier STRING_JOIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "string-join", 2);
- public static final FunctionIdentifier STRING_REPEAT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "repeat", 2);
- public static final FunctionIdentifier STRING_SPLIT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "split",
- 2);
+ public static final FunctionIdentifier STRING_EQUAL =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "string-equal", 2);
+ public static final FunctionIdentifier STRING_MATCHES =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "matches", 2);
+ public static final FunctionIdentifier STRING_MATCHES_WITH_FLAG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "matches", 3);
+ public static final FunctionIdentifier STRING_REGEXP_LIKE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "regexp-like", 2);
+ public static final FunctionIdentifier STRING_REGEXP_LIKE_WITH_FLAG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "regexp-like", 3);
+ public static final FunctionIdentifier STRING_REGEXP_POSITION =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "regexp-position", 2);
+ public static final FunctionIdentifier STRING_REGEXP_POSITION_WITH_FLAG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "regexp-position", 3);
+ public static final FunctionIdentifier STRING_LOWERCASE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "lowercase", 1);
+ public static final FunctionIdentifier STRING_UPPERCASE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "uppercase", 1);
+ public static final FunctionIdentifier STRING_INITCAP =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "initcap", 1);
+ public static final FunctionIdentifier STRING_TRIM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "trim", 1);
+ public static final FunctionIdentifier STRING_LTRIM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "ltrim", 1);
+ public static final FunctionIdentifier STRING_RTRIM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "rtrim", 1);
+ public static final FunctionIdentifier STRING_TRIM2 =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "trim", 2);
+ public static final FunctionIdentifier STRING_LTRIM2 =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "ltrim", 2);
+ public static final FunctionIdentifier STRING_RTRIM2 =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "rtrim", 2);
+ public static final FunctionIdentifier STRING_POSITION =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "position", 2);
+ public static final FunctionIdentifier STRING_REPLACE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "replace", 3);
+ public static final FunctionIdentifier STRING_REPLACE_WITH_FLAG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "replace", 4);
+ public static final FunctionIdentifier STRING_LENGTH =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "string-length", 1);
+ public static final FunctionIdentifier STRING_LIKE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "like", 2);
+ public static final FunctionIdentifier STRING_CONTAINS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "contains", 2);
+ public static final FunctionIdentifier STRING_STARTS_WITH =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "starts-with", 2);
+ public static final FunctionIdentifier STRING_ENDS_WITH =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "ends-with", 2);
+ public static final FunctionIdentifier SUBSTRING =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "substring", 3);
+ public static final FunctionIdentifier SUBSTRING2 =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "substring", 2);
+ public static final FunctionIdentifier SUBSTRING_BEFORE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "substring-before", 2);
+ public static final FunctionIdentifier SUBSTRING_AFTER =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "substring-after", 2);
+ public static final FunctionIdentifier STRING_TO_CODEPOINT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "string-to-codepoint", 1);
+ public static final FunctionIdentifier CODEPOINT_TO_STRING =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "codepoint-to-string", 1);
+ public static final FunctionIdentifier STRING_CONCAT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "string-concat", 1);
+ public static final FunctionIdentifier STRING_JOIN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "string-join", 2);
+ public static final FunctionIdentifier STRING_REPEAT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "repeat", 2);
+ public static final FunctionIdentifier STRING_SPLIT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "split", 2);
public static final FunctionIdentifier DATASET = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "dataset", 1);
- public static final FunctionIdentifier FEED_COLLECT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "feed-collect", 6);
- public static final FunctionIdentifier FEED_INTERCEPT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "feed-intercept", 1);
+ public static final FunctionIdentifier FEED_COLLECT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "feed-collect", 6);
+ public static final FunctionIdentifier FEED_INTERCEPT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "feed-intercept", 1);
- public static final FunctionIdentifier INDEX_SEARCH = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "index-search", FunctionIdentifier.VARARGS);
+ public static final FunctionIdentifier INDEX_SEARCH =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "index-search", FunctionIdentifier.VARARGS);
- public static final FunctionIdentifier MAKE_FIELD_INDEX_HANDLE = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "make-field-index-handle", 2);
- public static final FunctionIdentifier MAKE_FIELD_NESTED_HANDLE = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "make-field-nested-handle", 3);
- public static final FunctionIdentifier MAKE_FIELD_NAME_HANDLE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "make-field-name-handle", 1);
+ public static final FunctionIdentifier MAKE_FIELD_INDEX_HANDLE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "make-field-index-handle", 2);
+ public static final FunctionIdentifier MAKE_FIELD_NESTED_HANDLE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "make-field-nested-handle", 3);
+ public static final FunctionIdentifier MAKE_FIELD_NAME_HANDLE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "make-field-name-handle", 1);
public static final FunctionIdentifier AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-avg", 1);
public static final FunctionIdentifier COUNT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-count", 1);
public static final FunctionIdentifier SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sum", 1);
- public static final FunctionIdentifier LOCAL_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-local-sum", 1);
+ public static final FunctionIdentifier LOCAL_SUM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-sum", 1);
public static final FunctionIdentifier MAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-max", 1);
- public static final FunctionIdentifier LOCAL_MAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-local-max", 1);
+ public static final FunctionIdentifier LOCAL_MAX =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-max", 1);
public static final FunctionIdentifier MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-min", 1);
- public static final FunctionIdentifier LOCAL_MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-local-min", 1);
- public static final FunctionIdentifier GLOBAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-global-avg", 1);
- public static final FunctionIdentifier INTERMEDIATE_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-intermediate-avg", 1);
- public static final FunctionIdentifier LOCAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-local-avg", 1);
- public static final FunctionIdentifier FIRST_ELEMENT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-first-element", 1);
- public static final FunctionIdentifier LOCAL_FIRST_ELEMENT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-local-first-element", 1);
+ public static final FunctionIdentifier LOCAL_MIN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-min", 1);
+ public static final FunctionIdentifier GLOBAL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-global-avg", 1);
+ public static final FunctionIdentifier INTERMEDIATE_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-intermediate-avg", 1);
+ public static final FunctionIdentifier LOCAL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-avg", 1);
+ public static final FunctionIdentifier FIRST_ELEMENT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-first-element", 1);
+ public static final FunctionIdentifier LOCAL_FIRST_ELEMENT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-first-element", 1);
public static final FunctionIdentifier SCALAR_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "avg", 1);
- public static final FunctionIdentifier SCALAR_COUNT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "count",
- 1);
+ public static final FunctionIdentifier SCALAR_COUNT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "count", 1);
public static final FunctionIdentifier SCALAR_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sum", 1);
public static final FunctionIdentifier SCALAR_MAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "max", 1);
public static final FunctionIdentifier SCALAR_MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "min", 1);
- public static final FunctionIdentifier SCALAR_GLOBAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "global-avg", 1);
- public static final FunctionIdentifier SCALAR_LOCAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "local-avg", 1);
- public static final FunctionIdentifier SCALAR_FIRST_ELEMENT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "first-element", 1);
+ public static final FunctionIdentifier SCALAR_GLOBAL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "global-avg", 1);
+ public static final FunctionIdentifier SCALAR_LOCAL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "local-avg", 1);
+ public static final FunctionIdentifier SCALAR_FIRST_ELEMENT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "first-element", 1);
// serializable aggregate functions
- public static final FunctionIdentifier SERIAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "avg-serial", 1);
- public static final FunctionIdentifier SERIAL_COUNT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "count-serial", 1);
- public static final FunctionIdentifier SERIAL_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sum-serial", 1);
- public static final FunctionIdentifier SERIAL_LOCAL_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "local-sum-serial", 1);
- public static final FunctionIdentifier SERIAL_GLOBAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "global-avg-serial", 1);
- public static final FunctionIdentifier SERIAL_LOCAL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "local-avg-serial", 1);
- public static final FunctionIdentifier SERIAL_INTERMEDIATE_AVG = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "intermediate-avg-serial", 1);
+ public static final FunctionIdentifier SERIAL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "avg-serial", 1);
+ public static final FunctionIdentifier SERIAL_COUNT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "count-serial", 1);
+ public static final FunctionIdentifier SERIAL_SUM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sum-serial", 1);
+ public static final FunctionIdentifier SERIAL_LOCAL_SUM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "local-sum-serial", 1);
+ public static final FunctionIdentifier SERIAL_GLOBAL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "global-avg-serial", 1);
+ public static final FunctionIdentifier SERIAL_LOCAL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "local-avg-serial", 1);
+ public static final FunctionIdentifier SERIAL_INTERMEDIATE_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "intermediate-avg-serial", 1);
// distinct aggregate functions
- public static final FunctionIdentifier COUNT_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-count-distinct", 1);
- public static final FunctionIdentifier SCALAR_COUNT_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "count-distinct", 1);
- public static final FunctionIdentifier SUM_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-sum-distinct", 1);
- public static final FunctionIdentifier SCALAR_SUM_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sum-distinct", 1);
- public static final FunctionIdentifier AVG_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-avg-distinct", 1);
- public static final FunctionIdentifier SCALAR_AVG_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "avg-distinct", 1);
- public static final FunctionIdentifier MAX_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-max-distinct", 1);
- public static final FunctionIdentifier SCALAR_MAX_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "max-distinct", 1);
- public static final FunctionIdentifier MIN_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-min-distinct", 1);
- public static final FunctionIdentifier SCALAR_MIN_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "min-distinct", 1);
+ public static final FunctionIdentifier COUNT_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-count-distinct", 1);
+ public static final FunctionIdentifier SCALAR_COUNT_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "count-distinct", 1);
+ public static final FunctionIdentifier SUM_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sum-distinct", 1);
+ public static final FunctionIdentifier SCALAR_SUM_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sum-distinct", 1);
+ public static final FunctionIdentifier AVG_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-avg-distinct", 1);
+ public static final FunctionIdentifier SCALAR_AVG_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "avg-distinct", 1);
+ public static final FunctionIdentifier MAX_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-max-distinct", 1);
+ public static final FunctionIdentifier SCALAR_MAX_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "max-distinct", 1);
+ public static final FunctionIdentifier MIN_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-min-distinct", 1);
+ public static final FunctionIdentifier SCALAR_MIN_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "min-distinct", 1);
// sql aggregate functions
- public static final FunctionIdentifier SQL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-avg",
- 1);
- public static final FunctionIdentifier INTERMEDIATE_SQL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "intermediate-agg-sql-avg", 1);
- public static final FunctionIdentifier SQL_COUNT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-sql-count", 1);
- public static final FunctionIdentifier SQL_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-sum",
- 1);
- public static final FunctionIdentifier LOCAL_SQL_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-local-sql-sum", 1);
- public static final FunctionIdentifier SQL_MAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-max",
- 1);
- public static final FunctionIdentifier LOCAL_SQL_MAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-local-sql-max", 1);
- public static final FunctionIdentifier SQL_MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-min",
- 1);
- public static final FunctionIdentifier LOCAL_SQL_MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-local-sql-min", 1);
- public static final FunctionIdentifier GLOBAL_SQL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-global-sql-avg", 1);
- public static final FunctionIdentifier LOCAL_SQL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-local-sql-avg", 1);
+ public static final FunctionIdentifier SQL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-avg", 1);
+ public static final FunctionIdentifier INTERMEDIATE_SQL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "intermediate-agg-sql-avg", 1);
+ public static final FunctionIdentifier SQL_COUNT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-count", 1);
+ public static final FunctionIdentifier SQL_SUM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-sum", 1);
+ public static final FunctionIdentifier LOCAL_SQL_SUM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-sql-sum", 1);
+ public static final FunctionIdentifier SQL_MAX =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-max", 1);
+ public static final FunctionIdentifier LOCAL_SQL_MAX =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-sql-max", 1);
+ public static final FunctionIdentifier SQL_MIN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-min", 1);
+ public static final FunctionIdentifier LOCAL_SQL_MIN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-sql-min", 1);
+ public static final FunctionIdentifier GLOBAL_SQL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-global-sql-avg", 1);
+ public static final FunctionIdentifier LOCAL_SQL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-local-sql-avg", 1);
- public static final FunctionIdentifier SCALAR_SQL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sql-avg", 1);
- public static final FunctionIdentifier SCALAR_SQL_COUNT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sql-count", 1);
- public static final FunctionIdentifier SCALAR_SQL_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sql-sum", 1);
- public static final FunctionIdentifier SCALAR_SQL_MAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sql-max", 1);
- public static final FunctionIdentifier SCALAR_SQL_MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sql-min", 1);
- public static final FunctionIdentifier SCALAR_GLOBAL_SQL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "global-sql-avg", 1);
- public static final FunctionIdentifier SCALAR_LOCAL_SQL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "local-sql-avg", 1);
+ public static final FunctionIdentifier SCALAR_SQL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-avg", 1);
+ public static final FunctionIdentifier SCALAR_SQL_COUNT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-count", 1);
+ public static final FunctionIdentifier SCALAR_SQL_SUM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-sum", 1);
+ public static final FunctionIdentifier SCALAR_SQL_MAX =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-max", 1);
+ public static final FunctionIdentifier SCALAR_SQL_MIN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-min", 1);
+ public static final FunctionIdentifier SCALAR_GLOBAL_SQL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "global-sql-avg", 1);
+ public static final FunctionIdentifier SCALAR_LOCAL_SQL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "local-sql-avg", 1);
// serializable sql aggregate functions
- public static final FunctionIdentifier SERIAL_SQL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sql-avg-serial", 1);
- public static final FunctionIdentifier SERIAL_SQL_COUNT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sql-count-serial", 1);
- public static final FunctionIdentifier SERIAL_SQL_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sql-sum-serial", 1);
- public static final FunctionIdentifier SERIAL_LOCAL_SQL_SUM = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "local-sql-sum-serial", 1);
- public static final FunctionIdentifier SERIAL_GLOBAL_SQL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "global-sql-avg-serial", 1);
- public static final FunctionIdentifier SERIAL_INTERMEDIATE_SQL_AVG = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "intermediate-sql-avg-serial", 1);
- public static final FunctionIdentifier SERIAL_LOCAL_SQL_AVG = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "local-sql-avg-serial", 1);
+ public static final FunctionIdentifier SERIAL_SQL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-avg-serial", 1);
+ public static final FunctionIdentifier SERIAL_SQL_COUNT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-count-serial", 1);
+ public static final FunctionIdentifier SERIAL_SQL_SUM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-sum-serial", 1);
+ public static final FunctionIdentifier SERIAL_LOCAL_SQL_SUM =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "local-sql-sum-serial", 1);
+ public static final FunctionIdentifier SERIAL_GLOBAL_SQL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "global-sql-avg-serial", 1);
+ public static final FunctionIdentifier SERIAL_INTERMEDIATE_SQL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "intermediate-sql-avg-serial", 1);
+ public static final FunctionIdentifier SERIAL_LOCAL_SQL_AVG =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "local-sql-avg-serial", 1);
// distinct sql aggregate functions
- public static final FunctionIdentifier SQL_COUNT_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-sql-count-distinct", 1);
- public static final FunctionIdentifier SCALAR_SQL_COUNT_DISTINCT = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "sql-count-distinct", 1);
- public static final FunctionIdentifier SQL_SUM_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-sql-sum-distinct", 1);
- public static final FunctionIdentifier SCALAR_SQL_SUM_DISTINCT = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "sql-sum-distinct", 1);
- public static final FunctionIdentifier SQL_AVG_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-sql-avg-distinct", 1);
- public static final FunctionIdentifier SCALAR_SQL_AVG_DISTINCT = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "sql-avg-distinct", 1);
- public static final FunctionIdentifier SQL_MAX_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-sql-max-distinct", 1);
- public static final FunctionIdentifier SCALAR_SQL_MAX_DISTINCT = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "sql-max-distinct", 1);
- public static final FunctionIdentifier SQL_MIN_DISTINCT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "agg-sql-min-distinct", 1);
- public static final FunctionIdentifier SCALAR_SQL_MIN_DISTINCT = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "sql-min-distinct", 1);
+ public static final FunctionIdentifier SQL_COUNT_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-count-distinct", 1);
+ public static final FunctionIdentifier SCALAR_SQL_COUNT_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-count-distinct", 1);
+ public static final FunctionIdentifier SQL_SUM_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-sum-distinct", 1);
+ public static final FunctionIdentifier SCALAR_SQL_SUM_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-sum-distinct", 1);
+ public static final FunctionIdentifier SQL_AVG_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-avg-distinct", 1);
+ public static final FunctionIdentifier SCALAR_SQL_AVG_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-avg-distinct", 1);
+ public static final FunctionIdentifier SQL_MAX_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-max-distinct", 1);
+ public static final FunctionIdentifier SCALAR_SQL_MAX_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-max-distinct", 1);
+ public static final FunctionIdentifier SQL_MIN_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "agg-sql-min-distinct", 1);
+ public static final FunctionIdentifier SCALAR_SQL_MIN_DISTINCT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sql-min-distinct", 1);
- public static final FunctionIdentifier SCAN_COLLECTION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "scan-collection", 1);
- public static final FunctionIdentifier SUBSET_COLLECTION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "subset-collection", 3);
+ public static final FunctionIdentifier SCAN_COLLECTION =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "scan-collection", 1);
+ public static final FunctionIdentifier SUBSET_COLLECTION =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "subset-collection", 3);
public static final FunctionIdentifier RANGE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "range", 2);
// fuzzy functions:
- public static final FunctionIdentifier FUZZY_EQ = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "fuzzy-eq",
- 2);
+ public static final FunctionIdentifier FUZZY_EQ =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "fuzzy-eq", 2);
- public static final FunctionIdentifier PREFIX_LEN_JACCARD = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "prefix-len-jaccard", 2);
+ public static final FunctionIdentifier PREFIX_LEN_JACCARD =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "prefix-len-jaccard", 2);
- public static final FunctionIdentifier SIMILARITY_JACCARD = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "similarity-jaccard", 2);
- public static final FunctionIdentifier SIMILARITY_JACCARD_CHECK = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "similarity-jaccard-check", 3);
- public static final FunctionIdentifier SIMILARITY_JACCARD_SORTED = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "similarity-jaccard-sorted", 2);
- public static final FunctionIdentifier SIMILARITY_JACCARD_SORTED_CHECK = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "similarity-jaccard-sorted-check", 3);
- public static final FunctionIdentifier SIMILARITY_JACCARD_PREFIX = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "similarity-jaccard-prefix", 6);
- public static final FunctionIdentifier SIMILARITY_JACCARD_PREFIX_CHECK = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "similarity-jaccard-prefix-check", 6);
+ public static final FunctionIdentifier SIMILARITY_JACCARD =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "similarity-jaccard", 2);
+ public static final FunctionIdentifier SIMILARITY_JACCARD_CHECK =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "similarity-jaccard-check", 3);
+ public static final FunctionIdentifier SIMILARITY_JACCARD_SORTED =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "similarity-jaccard-sorted", 2);
+ public static final FunctionIdentifier SIMILARITY_JACCARD_SORTED_CHECK =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "similarity-jaccard-sorted-check", 3);
+ public static final FunctionIdentifier SIMILARITY_JACCARD_PREFIX =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "similarity-jaccard-prefix", 6);
+ public static final FunctionIdentifier SIMILARITY_JACCARD_PREFIX_CHECK =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "similarity-jaccard-prefix-check", 6);
- public static final FunctionIdentifier EDIT_DISTANCE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "edit-distance", 2);
- public static final FunctionIdentifier EDIT_DISTANCE_CHECK = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "edit-distance-check", 3);
- public static final FunctionIdentifier EDIT_DISTANCE_LIST_IS_FILTERABLE = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "edit-distance-list-is-filterable", 2);
- public static final FunctionIdentifier EDIT_DISTANCE_STRING_IS_FILTERABLE = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "edit-distance-string-is-filterable", 4);
- public static final FunctionIdentifier EDIT_DISTANCE_CONTAINS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "edit-distance-contains", 3);
+ public static final FunctionIdentifier EDIT_DISTANCE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "edit-distance", 2);
+ public static final FunctionIdentifier EDIT_DISTANCE_CHECK =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "edit-distance-check", 3);
+ public static final FunctionIdentifier EDIT_DISTANCE_LIST_IS_FILTERABLE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "edit-distance-list-is-filterable", 2);
+ public static final FunctionIdentifier EDIT_DISTANCE_STRING_IS_FILTERABLE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "edit-distance-string-is-filterable", 4);
+ public static final FunctionIdentifier EDIT_DISTANCE_CONTAINS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "edit-distance-contains", 3);
// full-text
public static final FunctionIdentifier FULLTEXT_CONTAINS =
@@ -547,271 +547,269 @@
new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "ftcontains", 2);
// tokenizers:
- public static final FunctionIdentifier WORD_TOKENS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "word-tokens", 1);
- public static final FunctionIdentifier HASHED_WORD_TOKENS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "hashed-word-tokens", 1);
- public static final FunctionIdentifier COUNTHASHED_WORD_TOKENS = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "counthashed-word-tokens", 1);
- public static final FunctionIdentifier GRAM_TOKENS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "gram-tokens", 3);
- public static final FunctionIdentifier HASHED_GRAM_TOKENS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "hashed-gram-tokens", 3);
- public static final FunctionIdentifier COUNTHASHED_GRAM_TOKENS = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "counthashed-gram-tokens", 3);
+ public static final FunctionIdentifier WORD_TOKENS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "word-tokens", 1);
+ public static final FunctionIdentifier HASHED_WORD_TOKENS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "hashed-word-tokens", 1);
+ public static final FunctionIdentifier COUNTHASHED_WORD_TOKENS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "counthashed-word-tokens", 1);
+ public static final FunctionIdentifier GRAM_TOKENS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "gram-tokens", 3);
+ public static final FunctionIdentifier HASHED_GRAM_TOKENS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "hashed-gram-tokens", 3);
+ public static final FunctionIdentifier COUNTHASHED_GRAM_TOKENS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "counthashed-gram-tokens", 3);
public static final FunctionIdentifier TID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "tid", 0);
public static final FunctionIdentifier GTID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "gtid", 0);
// constructors:
- public static final FunctionIdentifier BOOLEAN_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "boolean", 1);
- public static final FunctionIdentifier STRING_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "string", 1);
- public static final FunctionIdentifier BINARY_HEX_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "hex", 1);
- public static final FunctionIdentifier BINARY_BASE64_CONSTRUCTOR = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "base64", 1);
- public static final FunctionIdentifier INT8_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "int8", 1);
- public static final FunctionIdentifier INT16_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "int16", 1);
- public static final FunctionIdentifier INT32_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "int32", 1);
- public static final FunctionIdentifier INT64_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "int64", 1);
- public static final FunctionIdentifier FLOAT_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "float", 1);
- public static final FunctionIdentifier DOUBLE_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "double", 1);
- public static final FunctionIdentifier POINT_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "point", 1);
- public static final FunctionIdentifier POINT3D_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "point3d", 1);
- public static final FunctionIdentifier LINE_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "line", 1);
- public static final FunctionIdentifier CIRCLE_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "circle", 1);
- public static final FunctionIdentifier RECTANGLE_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "rectangle", 1);
- public static final FunctionIdentifier POLYGON_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "polygon", 1);
- public static final FunctionIdentifier TIME_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "time", 1);
- public static final FunctionIdentifier DATE_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "date", 1);
- public static final FunctionIdentifier DATETIME_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "datetime", 1);
- public static final FunctionIdentifier DURATION_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "duration", 1);
- public static final FunctionIdentifier UUID_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "uuid", 1);
+ public static final FunctionIdentifier BOOLEAN_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "boolean", 1);
+ public static final FunctionIdentifier STRING_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "string", 1);
+ public static final FunctionIdentifier BINARY_HEX_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "hex", 1);
+ public static final FunctionIdentifier BINARY_BASE64_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "base64", 1);
+ public static final FunctionIdentifier INT8_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "int8", 1);
+ public static final FunctionIdentifier INT16_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "int16", 1);
+ public static final FunctionIdentifier INT32_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "int32", 1);
+ public static final FunctionIdentifier INT64_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "int64", 1);
+ public static final FunctionIdentifier FLOAT_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "float", 1);
+ public static final FunctionIdentifier DOUBLE_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "double", 1);
+ public static final FunctionIdentifier POINT_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "point", 1);
+ public static final FunctionIdentifier POINT3D_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "point3d", 1);
+ public static final FunctionIdentifier LINE_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "line", 1);
+ public static final FunctionIdentifier CIRCLE_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "circle", 1);
+ public static final FunctionIdentifier RECTANGLE_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "rectangle", 1);
+ public static final FunctionIdentifier POLYGON_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "polygon", 1);
+ public static final FunctionIdentifier TIME_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "time", 1);
+ public static final FunctionIdentifier DATE_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "date", 1);
+ public static final FunctionIdentifier DATETIME_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "datetime", 1);
+ public static final FunctionIdentifier DURATION_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "duration", 1);
+ public static final FunctionIdentifier UUID_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "uuid", 1);
- public static final FunctionIdentifier YEAR_MONTH_DURATION_CONSTRUCTOR = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "year-month-duration", 1);
- public static final FunctionIdentifier DAY_TIME_DURATION_CONSTRUCTOR = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "day-time-duration", 1);
+ public static final FunctionIdentifier YEAR_MONTH_DURATION_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "year-month-duration", 1);
+ public static final FunctionIdentifier DAY_TIME_DURATION_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "day-time-duration", 1);
- public static final FunctionIdentifier INTERVAL_CONSTRUCTOR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval", 2);
- public static final FunctionIdentifier INTERVAL_CONSTRUCTOR_START_FROM_DATE = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "interval-start-from-date", 2);
- public static final FunctionIdentifier INTERVAL_CONSTRUCTOR_START_FROM_TIME = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "interval-start-from-time", 2);
- public static final FunctionIdentifier INTERVAL_CONSTRUCTOR_START_FROM_DATETIME = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "interval-start-from-datetime", 2);
- public static final FunctionIdentifier INTERVAL_BEFORE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-before", 2);
- public static final FunctionIdentifier INTERVAL_AFTER = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-after", 2);
- public static final FunctionIdentifier INTERVAL_MEETS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-meets", 2);
- public static final FunctionIdentifier INTERVAL_MET_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-met-by", 2);
- public static final FunctionIdentifier INTERVAL_OVERLAPS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-overlaps", 2);
- public static final FunctionIdentifier INTERVAL_OVERLAPPED_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-overlapped-by", 2);
- public static final FunctionIdentifier INTERVAL_OVERLAPPING = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-overlapping", 2);
- public static final FunctionIdentifier INTERVAL_STARTS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-starts", 2);
- public static final FunctionIdentifier INTERVAL_STARTED_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-started-by", 2);
- public static final FunctionIdentifier INTERVAL_COVERS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-covers", 2);
- public static final FunctionIdentifier INTERVAL_COVERED_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-covered-by", 2);
- public static final FunctionIdentifier INTERVAL_ENDS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-ends", 2);
- public static final FunctionIdentifier INTERVAL_ENDED_BY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-ended-by", 2);
- public static final FunctionIdentifier CURRENT_TIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "current-time", 0);
- public static final FunctionIdentifier CURRENT_DATE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "current-date", 0);
- public static final FunctionIdentifier CURRENT_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "current-datetime", 0);
- public static final FunctionIdentifier DURATION_EQUAL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "duration-equal", 2);
- public static final FunctionIdentifier YEAR_MONTH_DURATION_GREATER_THAN = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "year-month-duration-greater-than", 2);
- public static final FunctionIdentifier YEAR_MONTH_DURATION_LESS_THAN = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "year-month-duration-less-than", 2);
- public static final FunctionIdentifier DAY_TIME_DURATION_GREATER_THAN = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "day-time-duration-greater-than", 2);
- public static final FunctionIdentifier DAY_TIME_DURATION_LESS_THAN = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "day-time-duration-less-than", 2);
- public static final FunctionIdentifier DURATION_FROM_MONTHS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "duration-from-months", 1);
- public static final FunctionIdentifier MONTHS_FROM_YEAR_MONTH_DURATION = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "months-from-year-month-duration", 1);
- public static final FunctionIdentifier DURATION_FROM_MILLISECONDS = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "duration-from-ms", 1);
- public static final FunctionIdentifier MILLISECONDS_FROM_DAY_TIME_DURATION = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "ms-from-day-time-duration", 1);
+ public static final FunctionIdentifier INTERVAL_CONSTRUCTOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval", 2);
+ public static final FunctionIdentifier INTERVAL_CONSTRUCTOR_START_FROM_DATE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-start-from-date", 2);
+ public static final FunctionIdentifier INTERVAL_CONSTRUCTOR_START_FROM_TIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-start-from-time", 2);
+ public static final FunctionIdentifier INTERVAL_CONSTRUCTOR_START_FROM_DATETIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-start-from-datetime", 2);
+ public static final FunctionIdentifier INTERVAL_BEFORE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-before", 2);
+ public static final FunctionIdentifier INTERVAL_AFTER =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-after", 2);
+ public static final FunctionIdentifier INTERVAL_MEETS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-meets", 2);
+ public static final FunctionIdentifier INTERVAL_MET_BY =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-met-by", 2);
+ public static final FunctionIdentifier INTERVAL_OVERLAPS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-overlaps", 2);
+ public static final FunctionIdentifier INTERVAL_OVERLAPPED_BY =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-overlapped-by", 2);
+ public static final FunctionIdentifier INTERVAL_OVERLAPPING =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-overlapping", 2);
+ public static final FunctionIdentifier INTERVAL_STARTS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-starts", 2);
+ public static final FunctionIdentifier INTERVAL_STARTED_BY =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-started-by", 2);
+ public static final FunctionIdentifier INTERVAL_COVERS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-covers", 2);
+ public static final FunctionIdentifier INTERVAL_COVERED_BY =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-covered-by", 2);
+ public static final FunctionIdentifier INTERVAL_ENDS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-ends", 2);
+ public static final FunctionIdentifier INTERVAL_ENDED_BY =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-ended-by", 2);
+ public static final FunctionIdentifier CURRENT_TIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "current-time", 0);
+ public static final FunctionIdentifier CURRENT_DATE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "current-date", 0);
+ public static final FunctionIdentifier CURRENT_DATETIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "current-datetime", 0);
+ public static final FunctionIdentifier DURATION_EQUAL =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "duration-equal", 2);
+ public static final FunctionIdentifier YEAR_MONTH_DURATION_GREATER_THAN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "year-month-duration-greater-than", 2);
+ public static final FunctionIdentifier YEAR_MONTH_DURATION_LESS_THAN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "year-month-duration-less-than", 2);
+ public static final FunctionIdentifier DAY_TIME_DURATION_GREATER_THAN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "day-time-duration-greater-than", 2);
+ public static final FunctionIdentifier DAY_TIME_DURATION_LESS_THAN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "day-time-duration-less-than", 2);
+ public static final FunctionIdentifier DURATION_FROM_MONTHS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "duration-from-months", 1);
+ public static final FunctionIdentifier MONTHS_FROM_YEAR_MONTH_DURATION =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "months-from-year-month-duration", 1);
+ public static final FunctionIdentifier DURATION_FROM_MILLISECONDS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "duration-from-ms", 1);
+ public static final FunctionIdentifier MILLISECONDS_FROM_DAY_TIME_DURATION =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "ms-from-day-time-duration", 1);
- public static final FunctionIdentifier GET_YEAR_MONTH_DURATION = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-year-month-duration", 1);
- public static final FunctionIdentifier GET_DAY_TIME_DURATION = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "get-day-time-duration", 1);
- public static final FunctionIdentifier DURATION_FROM_INTERVAL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "duration-from-interval", 1);
+ public static final FunctionIdentifier GET_YEAR_MONTH_DURATION =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-year-month-duration", 1);
+ public static final FunctionIdentifier GET_DAY_TIME_DURATION =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-day-time-duration", 1);
+ public static final FunctionIdentifier DURATION_FROM_INTERVAL =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "duration-from-interval", 1);
// spatial
- public static final FunctionIdentifier CREATE_POINT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "create-point", 2);
- public static final FunctionIdentifier CREATE_LINE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "create-line", 2);
- public static final FunctionIdentifier CREATE_POLYGON = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "create-polygon", 1);
- public static final FunctionIdentifier CREATE_CIRCLE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "create-circle", 2);
- public static final FunctionIdentifier CREATE_RECTANGLE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "create-rectangle", 2);
- public static final FunctionIdentifier SPATIAL_INTERSECT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "spatial-intersect", 2);
- public static final FunctionIdentifier SPATIAL_AREA = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "spatial-area", 1);
- public static final FunctionIdentifier SPATIAL_DISTANCE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "spatial-distance", 2);
- public static final FunctionIdentifier CREATE_MBR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "create-mbr", 3);
- public static final FunctionIdentifier SPATIAL_CELL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "spatial-cell", 4);
- public static final FunctionIdentifier SWITCH_CASE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "switch-case", FunctionIdentifier.VARARGS);
- public static final FunctionIdentifier SLEEP = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "sleep", 2);
- public static final FunctionIdentifier INJECT_FAILURE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "inject-failure", 2);
- public static final FunctionIdentifier FLOW_RECORD = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "flow-object", 1);
- public static final FunctionIdentifier CAST_TYPE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "cast", 1);
- public static final FunctionIdentifier CAST_TYPE_LAX = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "cast-lax", 1);
+ public static final FunctionIdentifier CREATE_POINT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "create-point", 2);
+ public static final FunctionIdentifier CREATE_LINE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "create-line", 2);
+ public static final FunctionIdentifier CREATE_POLYGON =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "create-polygon", 1);
+ public static final FunctionIdentifier CREATE_CIRCLE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "create-circle", 2);
+ public static final FunctionIdentifier CREATE_RECTANGLE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "create-rectangle", 2);
+ public static final FunctionIdentifier SPATIAL_INTERSECT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "spatial-intersect", 2);
+ public static final FunctionIdentifier SPATIAL_AREA =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "spatial-area", 1);
+ public static final FunctionIdentifier SPATIAL_DISTANCE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "spatial-distance", 2);
+ public static final FunctionIdentifier CREATE_MBR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "create-mbr", 3);
+ public static final FunctionIdentifier SPATIAL_CELL =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "spatial-cell", 4);
+ public static final FunctionIdentifier SWITCH_CASE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "switch-case", FunctionIdentifier.VARARGS);
+ public static final FunctionIdentifier SLEEP = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "sleep", 2);
+ public static final FunctionIdentifier INJECT_FAILURE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "inject-failure", 2);
+ public static final FunctionIdentifier FLOW_RECORD =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "flow-object", 1);
+ public static final FunctionIdentifier CAST_TYPE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "cast", 1);
+ public static final FunctionIdentifier CAST_TYPE_LAX =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "cast-lax", 1);
- public static final FunctionIdentifier CREATE_UUID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "create-uuid", 0);
+ public static final FunctionIdentifier CREATE_UUID =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "create-uuid", 0);
public static final FunctionIdentifier UUID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "uuid", 0);
- public static final FunctionIdentifier CREATE_QUERY_UID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "create-query-uid", 0);
+ public static final FunctionIdentifier CREATE_QUERY_UID =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "create-query-uid", 0);
// Spatial and temporal type accessors
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_YEAR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "get-year", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_MONTH = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-month", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_DAY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "get-day", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_HOUR = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "get-hour", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_MIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "get-minute", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_SEC = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "get-second", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_MILLISEC = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-millisecond", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_START = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-interval-start", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_END = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-interval-end", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_START_DATETIME = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-interval-start-datetime", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_END_DATETIME = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-interval-end-datetime", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_START_DATE = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-interval-start-date", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_END_DATE = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-interval-end-date", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_START_TIME = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-interval-start-time", 1);
- public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_END_TIME = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-interval-end-time", 1);
- public static final FunctionIdentifier INTERVAL_BIN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "interval-bin", 3);
- public static final FunctionIdentifier OVERLAP_BINS = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "overlap-bins", 3);
- public static final FunctionIdentifier GET_OVERLAPPING_INTERVAL = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-overlapping-interval", 2);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_YEAR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-year", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_MONTH =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-month", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_DAY =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-day", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_HOUR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-hour", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_MIN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-minute", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_SEC =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-second", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_MILLISEC =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-millisecond", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_START =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-interval-start", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_END =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-interval-end", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_START_DATETIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-interval-start-datetime", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_END_DATETIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-interval-end-datetime", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_START_DATE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-interval-start-date", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_END_DATE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-interval-end-date", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_START_TIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-interval-start-time", 1);
+ public static final FunctionIdentifier ACCESSOR_TEMPORAL_INTERVAL_END_TIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-interval-end-time", 1);
+ public static final FunctionIdentifier INTERVAL_BIN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "interval-bin", 3);
+ public static final FunctionIdentifier OVERLAP_BINS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "overlap-bins", 3);
+ public static final FunctionIdentifier GET_OVERLAPPING_INTERVAL =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-overlapping-interval", 2);
// Temporal functions
- public static final FunctionIdentifier UNIX_TIME_FROM_DATE_IN_DAYS = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "unix-time-from-date-in-days", 1);
- public final static FunctionIdentifier UNIX_TIME_FROM_TIME_IN_MS = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "unix-time-from-time-in-ms", 1);
- public final static FunctionIdentifier UNIX_TIME_FROM_DATETIME_IN_MS = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "unix-time-from-datetime-in-ms", 1);
- public final static FunctionIdentifier UNIX_TIME_FROM_DATETIME_IN_SECS = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "unix-time-from-datetime-in-secs", 1);
- public static final FunctionIdentifier DATE_FROM_UNIX_TIME_IN_DAYS = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "date-from-unix-time-in-days", 1);
- public static final FunctionIdentifier DATE_FROM_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "get-date-from-datetime", 1);
- public static final FunctionIdentifier TIME_FROM_UNIX_TIME_IN_MS = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "time-from-unix-time-in-ms", 1);
- public static final FunctionIdentifier TIME_FROM_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "get-time-from-datetime", 1);
- public static final FunctionIdentifier DATETIME_FROM_UNIX_TIME_IN_MS = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "datetime-from-unix-time-in-ms", 1);
- public static final FunctionIdentifier DATETIME_FROM_UNIX_TIME_IN_SECS = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "datetime-from-unix-time-in-secs", 1);
- public static final FunctionIdentifier DATETIME_FROM_DATE_TIME = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "datetime-from-date-time", 2);
- public static final FunctionIdentifier CALENDAR_DURATION_FROM_DATETIME = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "calendar-duration-from-datetime", 2);
- public static final FunctionIdentifier CALENDAR_DURATION_FROM_DATE = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "calendar-duration-from-date", 2);
- public static final FunctionIdentifier ADJUST_TIME_FOR_TIMEZONE = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "adjust-time-for-timezone", 2);
- public static final FunctionIdentifier ADJUST_DATETIME_FOR_TIMEZONE = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "adjust-datetime-for-timezone", 2);
- public static final FunctionIdentifier DAY_OF_WEEK = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "day-of-week", 1);
- public static final FunctionIdentifier PARSE_DATE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "parse-date", 2);
- public static final FunctionIdentifier PARSE_TIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "parse-time", 2);
- public static final FunctionIdentifier PARSE_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "parse-datetime", 2);
- public static final FunctionIdentifier PRINT_DATE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "print-date", 2);
- public static final FunctionIdentifier PRINT_TIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "print-time", 2);
- public static final FunctionIdentifier PRINT_DATETIME = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "print-datetime", 2);
+ public static final FunctionIdentifier UNIX_TIME_FROM_DATE_IN_DAYS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "unix-time-from-date-in-days", 1);
+ public final static FunctionIdentifier UNIX_TIME_FROM_TIME_IN_MS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "unix-time-from-time-in-ms", 1);
+ public final static FunctionIdentifier UNIX_TIME_FROM_DATETIME_IN_MS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "unix-time-from-datetime-in-ms", 1);
+ public final static FunctionIdentifier UNIX_TIME_FROM_DATETIME_IN_SECS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "unix-time-from-datetime-in-secs", 1);
+ public static final FunctionIdentifier DATE_FROM_UNIX_TIME_IN_DAYS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "date-from-unix-time-in-days", 1);
+ public static final FunctionIdentifier DATE_FROM_DATETIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-date-from-datetime", 1);
+ public static final FunctionIdentifier TIME_FROM_UNIX_TIME_IN_MS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "time-from-unix-time-in-ms", 1);
+ public static final FunctionIdentifier TIME_FROM_DATETIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-time-from-datetime", 1);
+ public static final FunctionIdentifier DATETIME_FROM_UNIX_TIME_IN_MS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "datetime-from-unix-time-in-ms", 1);
+ public static final FunctionIdentifier DATETIME_FROM_UNIX_TIME_IN_SECS =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "datetime-from-unix-time-in-secs", 1);
+ public static final FunctionIdentifier DATETIME_FROM_DATE_TIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "datetime-from-date-time", 2);
+ public static final FunctionIdentifier CALENDAR_DURATION_FROM_DATETIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "calendar-duration-from-datetime", 2);
+ public static final FunctionIdentifier CALENDAR_DURATION_FROM_DATE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "calendar-duration-from-date", 2);
+ public static final FunctionIdentifier ADJUST_TIME_FOR_TIMEZONE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "adjust-time-for-timezone", 2);
+ public static final FunctionIdentifier ADJUST_DATETIME_FOR_TIMEZONE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "adjust-datetime-for-timezone", 2);
+ public static final FunctionIdentifier DAY_OF_WEEK =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "day-of-week", 1);
+ public static final FunctionIdentifier PARSE_DATE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "parse-date", 2);
+ public static final FunctionIdentifier PARSE_TIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "parse-time", 2);
+ public static final FunctionIdentifier PARSE_DATETIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "parse-datetime", 2);
+ public static final FunctionIdentifier PRINT_DATE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "print-date", 2);
+ public static final FunctionIdentifier PRINT_TIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "print-time", 2);
+ public static final FunctionIdentifier PRINT_DATETIME =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "print-datetime", 2);
- public static final FunctionIdentifier GET_POINT_X_COORDINATE_ACCESSOR = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-x", 1);
- public static final FunctionIdentifier GET_POINT_Y_COORDINATE_ACCESSOR = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-y", 1);
- public static final FunctionIdentifier GET_CIRCLE_RADIUS_ACCESSOR = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-radius", 1);
- public static final FunctionIdentifier GET_CIRCLE_CENTER_ACCESSOR = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-center", 1);
- public static final FunctionIdentifier GET_POINTS_LINE_RECTANGLE_POLYGON_ACCESSOR = new FunctionIdentifier(
- FunctionConstants.ASTERIX_NS, "get-points", 1);
+ public static final FunctionIdentifier GET_POINT_X_COORDINATE_ACCESSOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-x", 1);
+ public static final FunctionIdentifier GET_POINT_Y_COORDINATE_ACCESSOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-y", 1);
+ public static final FunctionIdentifier GET_CIRCLE_RADIUS_ACCESSOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-radius", 1);
+ public static final FunctionIdentifier GET_CIRCLE_CENTER_ACCESSOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-center", 1);
+ public static final FunctionIdentifier GET_POINTS_LINE_RECTANGLE_POLYGON_ACCESSOR =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-points", 1);
public static final FunctionIdentifier EQ = AlgebricksBuiltinFunctions.EQ;
public static final FunctionIdentifier LE = AlgebricksBuiltinFunctions.LE;
@@ -825,32 +823,32 @@
public static final FunctionIdentifier NUMERIC_ADD = AlgebricksBuiltinFunctions.NUMERIC_ADD;
public static final FunctionIdentifier IS_MISSING = AlgebricksBuiltinFunctions.IS_MISSING;
public static final FunctionIdentifier IS_NULL = AlgebricksBuiltinFunctions.IS_NULL;
- public static final FunctionIdentifier IS_UNKNOWN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "is-unknown", 1);
- public static final FunctionIdentifier IS_BOOLEAN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "is-boolean", 1);
- public static final FunctionIdentifier IS_NUMBER = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-number",
- 1);
- public static final FunctionIdentifier IS_STRING = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-string",
- 1);
- public static final FunctionIdentifier IS_ARRAY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-array",
- 1);
- public static final FunctionIdentifier IS_OBJECT = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-object",
- 1);
+ public static final FunctionIdentifier IS_UNKNOWN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-unknown", 1);
+ public static final FunctionIdentifier IS_BOOLEAN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-boolean", 1);
+ public static final FunctionIdentifier IS_NUMBER =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-number", 1);
+ public static final FunctionIdentifier IS_STRING =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-string", 1);
+ public static final FunctionIdentifier IS_ARRAY =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-array", 1);
+ public static final FunctionIdentifier IS_OBJECT =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-object", 1);
- public static final FunctionIdentifier IS_SYSTEM_NULL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "is-system-null", 1);
- public static final FunctionIdentifier CHECK_UNKNOWN = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "check-unknown", 1);
- public static final FunctionIdentifier COLLECTION_TO_SEQUENCE = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "collection-to-sequence", 1);
+ public static final FunctionIdentifier IS_SYSTEM_NULL =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "is-system-null", 1);
+ public static final FunctionIdentifier CHECK_UNKNOWN =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "check-unknown", 1);
+ public static final FunctionIdentifier COLLECTION_TO_SEQUENCE =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "collection-to-sequence", 1);
- public static final FunctionIdentifier IF_MISSING = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "if-missing", FunctionIdentifier.VARARGS);
- public static final FunctionIdentifier IF_NULL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "if-null", FunctionIdentifier.VARARGS);
- public static final FunctionIdentifier IF_MISSING_OR_NULL = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "if-missing-or-null", FunctionIdentifier.VARARGS);
+ public static final FunctionIdentifier IF_MISSING =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "if-missing", FunctionIdentifier.VARARGS);
+ public static final FunctionIdentifier IF_NULL =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "if-null", FunctionIdentifier.VARARGS);
+ public static final FunctionIdentifier IF_MISSING_OR_NULL =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "if-missing-or-null", FunctionIdentifier.VARARGS);
public static final FunctionIdentifier TO_BOOLEAN =
new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "to-boolean", 1);
@@ -861,16 +859,16 @@
public static final FunctionIdentifier TO_BIGINT =
new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "to-bigint", 1);
- public static final FunctionIdentifier EXTERNAL_LOOKUP = new FunctionIdentifier(FunctionConstants.ASTERIX_NS,
- "external-lookup", FunctionIdentifier.VARARGS);
+ public static final FunctionIdentifier EXTERNAL_LOOKUP =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "external-lookup", FunctionIdentifier.VARARGS);
public static final FunctionIdentifier GET_JOB_PARAMETER =
new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "get-job-param", 1);
- public static final FunctionIdentifier META = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "meta",
- FunctionIdentifier.VARARGS);
- public static final FunctionIdentifier META_KEY = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "meta-key",
- FunctionIdentifier.VARARGS);
+ public static final FunctionIdentifier META =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "meta", FunctionIdentifier.VARARGS);
+ public static final FunctionIdentifier META_KEY =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "meta-key", FunctionIdentifier.VARARGS);
public static IFunctionInfo getAsterixFunctionInfo(FunctionIdentifier fid) {
return registeredFunctions.get(fid);
@@ -1534,8 +1532,8 @@
}
public static boolean isBuiltinCompilerFunction(FunctionSignature signature, boolean includePrivateFunctions) {
- FunctionIdentifier fi = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, signature.getName(),
- signature.getArity());
+ FunctionIdentifier fi =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, signature.getName(), signature.getArity());
IFunctionInfo finfo = getAsterixFunctionInfo(fi);
if (builtinPublicFunctionsSet.keySet().contains(finfo)
|| (includePrivateFunctions && builtinPrivateFunctionsSet.keySet().contains(finfo))) {
@@ -1701,8 +1699,7 @@
}
static {
- spatialFilterFunctions.put(getAsterixFunctionInfo(BuiltinFunctions.SPATIAL_INTERSECT),
- SpatialFilterKind.SI);
+ spatialFilterFunctions.put(getAsterixFunctionInfo(BuiltinFunctions.SPATIAL_INTERSECT), SpatialFilterKind.SI);
}
public static boolean isGlobalAggregateFunction(FunctionIdentifier fi) {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/ExternalFunctionInfo.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/ExternalFunctionInfo.java
index 0042b26..1bbfc43 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/ExternalFunctionInfo.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/functions/ExternalFunctionInfo.java
@@ -35,8 +35,8 @@
private final FunctionKind kind;
private final IAType returnType;
- public ExternalFunctionInfo(String namespace, String name, int arity, FunctionKind kind,
- List<IAType> argumentTypes, IAType returnType, IResultTypeComputer rtc, String body, String language) {
+ public ExternalFunctionInfo(String namespace, String name, int arity, FunctionKind kind, List<IAType> argumentTypes,
+ IAType returnType, IResultTypeComputer rtc, String body, String language) {
super(namespace, name, arity, true);
this.rtc = rtc;
this.argumentTypes = argumentTypes;
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java
index 45e56aba..b6ba848 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/ARecordVisitablePointable.java
@@ -227,8 +227,8 @@
}
} else {
typeTag = fieldTypes[fieldNumber].getTypeTag();
- fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffsets[fieldNumber],
- typeTag, false);
+ fieldValueLength =
+ NonTaggedFormatUtil.getFieldValueLength(b, fieldOffsets[fieldNumber], typeTag, false);
}
// set field value (including the type tag)
int fstart = dataBos.size();
@@ -246,8 +246,8 @@
for (int i = 0; i < numberOfOpenFields; i++) {
// set the field name (including a type tag, which is
// astring)
- int fieldValueLength = NonTaggedFormatUtil.getFieldValueLength(b, fieldOffset, ATypeTag.STRING,
- false);
+ int fieldValueLength =
+ NonTaggedFormatUtil.getFieldValueLength(b, fieldOffset, ATypeTag.STRING, false);
int fnstart = dataBos.size();
dataDos.writeByte(ATypeTag.SERIALIZED_STRING_TYPE_TAG);
dataDos.write(b, fieldOffset, fieldValueLength);
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/PointableAllocator.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/PointableAllocator.java
index 5f463e6..abd4841 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/PointableAllocator.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/PointableAllocator.java
@@ -37,21 +37,21 @@
*/
public class PointableAllocator {
- private IObjectPool<IVisitablePointable, IAType> flatValueAllocator = new ListObjectPool<IVisitablePointable, IAType>(
- AFlatValuePointable.FACTORY);
- private IObjectPool<IVisitablePointable, IAType> recordValueAllocator = new ListObjectPool<IVisitablePointable, IAType>(
- ARecordVisitablePointable.FACTORY);
- private IObjectPool<IVisitablePointable, IAType> listValueAllocator = new ListObjectPool<IVisitablePointable, IAType>(
- AListVisitablePointable.FACTORY);
- private IObjectPool<AOrderedListType, IAType> orederedListTypeAllocator = new ListObjectPool<AOrderedListType, IAType>(
- new IObjectFactory<AOrderedListType, IAType>() {
+ private IObjectPool<IVisitablePointable, IAType> flatValueAllocator =
+ new ListObjectPool<IVisitablePointable, IAType>(AFlatValuePointable.FACTORY);
+ private IObjectPool<IVisitablePointable, IAType> recordValueAllocator =
+ new ListObjectPool<IVisitablePointable, IAType>(ARecordVisitablePointable.FACTORY);
+ private IObjectPool<IVisitablePointable, IAType> listValueAllocator =
+ new ListObjectPool<IVisitablePointable, IAType>(AListVisitablePointable.FACTORY);
+ private IObjectPool<AOrderedListType, IAType> orederedListTypeAllocator =
+ new ListObjectPool<AOrderedListType, IAType>(new IObjectFactory<AOrderedListType, IAType>() {
@Override
public AOrderedListType create(IAType type) {
return new AOrderedListType(type, type.getTypeName() + "OrderedList");
}
});
- private IObjectPool<AOrderedListType, IAType> unorederedListTypeAllocator = new ListObjectPool<AOrderedListType, IAType>(
- new IObjectFactory<AOrderedListType, IAType>() {
+ private IObjectPool<AOrderedListType, IAType> unorederedListTypeAllocator =
+ new ListObjectPool<AOrderedListType, IAType>(new IObjectFactory<AOrderedListType, IAType>() {
@Override
public AOrderedListType create(IAType type) {
return new AOrderedListType(type, type.getTypeName() + "UnorderedList");
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/base/DefaultOpenFieldType.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/base/DefaultOpenFieldType.java
index 3b8f233..868aab5 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/base/DefaultOpenFieldType.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/base/DefaultOpenFieldType.java
@@ -42,12 +42,12 @@
}
// nested open list type
- public static AOrderedListType NESTED_OPEN_AORDERED_LIST_TYPE = new AOrderedListType(BuiltinType.ANY,
- "nested-ordered-list");
+ public static AOrderedListType NESTED_OPEN_AORDERED_LIST_TYPE =
+ new AOrderedListType(BuiltinType.ANY, "nested-ordered-list");
// nested open list type
- public static AUnorderedListType NESTED_OPEN_AUNORDERED_LIST_TYPE = new AUnorderedListType(BuiltinType.ANY,
- "nested-unordered-list");
+ public static AUnorderedListType NESTED_OPEN_AUNORDERED_LIST_TYPE =
+ new AUnorderedListType(BuiltinType.ANY, "nested-unordered-list");
public static IAType getDefaultOpenFieldType(ATypeTag tag) {
if (tag.equals(ATypeTag.OBJECT))
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ACastVisitor.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ACastVisitor.java
index 72e3072..6eb7d4c 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ACastVisitor.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ACastVisitor.java
@@ -112,8 +112,8 @@
arg.first.set(accessor);
return null;
}
- ATypeTag inputTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(accessor.getByteArray()[accessor.getStartOffset()]);
+ ATypeTag inputTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(accessor.getByteArray()[accessor.getStartOffset()]);
if (!needPromote(inputTypeTag, reqTypeTag)) {
arg.first.set(accessor);
} else {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/AListCaster.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/AListCaster.java
index 9635ac7..ace6769 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/AListCaster.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/AListCaster.java
@@ -46,8 +46,8 @@
// for storing the cast result
private final IVisitablePointable itemTempReference = PointableAllocator.allocateUnrestableEmpty();
- private final Triple<IVisitablePointable, IAType, Boolean> itemVisitorArg = new Triple<>(itemTempReference, null,
- null);
+ private final Triple<IVisitablePointable, IAType, Boolean> itemVisitorArg =
+ new Triple<>(itemTempReference, null, null);
private final UnorderedListBuilder unOrderedListBuilder = new UnorderedListBuilder();
private final OrderedListBuilder orderedListBuilder = new OrderedListBuilder();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ARecordCaster.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ARecordCaster.java
index 25a2f2b..aa6e26b 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ARecordCaster.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/cast/ARecordCaster.java
@@ -73,15 +73,15 @@
private final IVisitablePointable nullTypeTag = PointableAllocator.allocateUnrestableEmpty();
private final IVisitablePointable missingTypeTag = PointableAllocator.allocateUnrestableEmpty();
- private final IBinaryComparator fieldNameComparator = PointableBinaryComparatorFactory
- .of(UTF8StringPointable.FACTORY).createBinaryComparator();
+ private final IBinaryComparator fieldNameComparator =
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
private final ByteArrayAccessibleOutputStream outputBos = new ByteArrayAccessibleOutputStream();
private final DataOutputStream outputDos = new DataOutputStream(outputBos);
private final IVisitablePointable fieldTempReference = PointableAllocator.allocateUnrestableEmpty();
- private final Triple<IVisitablePointable, IAType, Boolean> nestedVisitorArg = new Triple<>(fieldTempReference, null,
- null);
+ private final Triple<IVisitablePointable, IAType, Boolean> nestedVisitorArg =
+ new Triple<>(fieldTempReference, null, null);
private int numInputFields = 0;
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/nonvisitor/AListPointable.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/nonvisitor/AListPointable.java
index 114ff2b..7c74a69 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/nonvisitor/AListPointable.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/nonvisitor/AListPointable.java
@@ -189,8 +189,7 @@
}
}
- public void getItemValue(AbstractCollectionType inputType, int index, DataOutput dOut)
- throws IOException {
+ public void getItemValue(AbstractCollectionType inputType, int index, DataOutput dOut) throws IOException {
if (getType() != ATypeTag.ANY.serialize()) {
dOut.writeByte(getType());
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/nonvisitor/ARecordPointable.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/nonvisitor/ARecordPointable.java
index af7e29f..77491e8 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/nonvisitor/ARecordPointable.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/pointables/nonvisitor/ARecordPointable.java
@@ -219,8 +219,7 @@
// Closed field accessors.
// -----------------------
- public void getClosedFieldValue(ARecordType recordType, int fieldId, DataOutput dOut)
- throws IOException {
+ public void getClosedFieldValue(ARecordType recordType, int fieldId, DataOutput dOut) throws IOException {
if (isClosedFieldNull(recordType, fieldId)) {
dOut.writeByte(ATypeTag.SERIALIZED_NULL_TYPE_TAG);
} else if (isClosedFieldMissing(recordType, fieldId)) {
@@ -286,8 +285,7 @@
// Open field accessors.
// -----------------------
- public void getOpenFieldValue(ARecordType recordType, int fieldId, DataOutput dOut)
- throws IOException {
+ public void getOpenFieldValue(ARecordType recordType, int fieldId, DataOutput dOut) throws IOException {
dOut.write(bytes, getOpenFieldValueOffset(recordType, fieldId), getOpenFieldValueSize(recordType, fieldId));
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/CollectionMemberResultType.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/CollectionMemberResultType.java
index f8f6925..cc2f9b6 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/CollectionMemberResultType.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/CollectionMemberResultType.java
@@ -37,8 +37,7 @@
protected void checkArgType(String funcName, int argIndex, IAType type) throws AlgebricksException {
ATypeTag actualTypeTag = type.getTypeTag();
if (type.getTypeTag() != ATypeTag.MULTISET && type.getTypeTag() != ATypeTag.ARRAY) {
- throw new TypeMismatchException(funcName, argIndex, actualTypeTag, ATypeTag.MULTISET,
- ATypeTag.ARRAY);
+ throw new TypeMismatchException(funcName, argIndex, actualTypeTag, ATypeTag.MULTISET, ATypeTag.ARRAY);
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/FieldAccessNestedResultType.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/FieldAccessNestedResultType.java
index 21593d9e..3eae173 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/FieldAccessNestedResultType.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/FieldAccessNestedResultType.java
@@ -59,8 +59,7 @@
checkOrderedList(funcName, type);
break;
default:
- throw new TypeMismatchException(funcName, argIndex, actualTypeTag, ATypeTag.STRING,
- ATypeTag.ARRAY);
+ throw new TypeMismatchException(funcName, argIndex, actualTypeTag, ATypeTag.STRING, ATypeTag.ARRAY);
}
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/NonTaggedGetItemResultType.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/NonTaggedGetItemResultType.java
index a81ef34..575ec52 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/NonTaggedGetItemResultType.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/NonTaggedGetItemResultType.java
@@ -41,8 +41,7 @@
ATypeTag actualTypeTag = type.getTypeTag();
if (argIndex == 0) {
if (type.getTypeTag() != ATypeTag.MULTISET && type.getTypeTag() != ATypeTag.ARRAY) {
- throw new TypeMismatchException(funcName, argIndex, actualTypeTag, ATypeTag.STRING,
- ATypeTag.ARRAY);
+ throw new TypeMismatchException(funcName, argIndex, actualTypeTag, ATypeTag.STRING, ATypeTag.ARRAY);
}
} else {
if (!ATypeHierarchy.isCompatible(type.getTypeTag(), ATypeTag.INTEGER)) {
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordMergeTypeComputer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordMergeTypeComputer.java
index 5167f30..db4d1c4 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordMergeTypeComputer.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordMergeTypeComputer.java
@@ -131,17 +131,17 @@
// If a sub-record do merge, else ignore and let the values decide what to do
if (fieldType1Copy.getFieldTypes()[i].getTypeTag() == ATypeTag.OBJECT) {
IAType[] oldTypes = resultType.getFieldTypes();
- oldTypes[pos] = mergedNestedType(fname, fieldType1Copy.getFieldTypes()[i],
- resultType.getFieldTypes()[pos]);
+ oldTypes[pos] =
+ mergedNestedType(fname, fieldType1Copy.getFieldTypes()[i], resultType.getFieldTypes()[pos]);
resultType = new ARecordType(resultType.getTypeName(), resultType.getFieldNames(), oldTypes,
- resultType.isOpen());
+ resultType.isOpen());
}
} else {
- IAType[] combinedFieldTypes = ArrayUtils.addAll(resultType.getFieldTypes().clone(),
- fieldType1Copy.getFieldTypes()[i]);
+ IAType[] combinedFieldTypes =
+ ArrayUtils.addAll(resultType.getFieldTypes().clone(), fieldType1Copy.getFieldTypes()[i]);
resultType = new ARecordType(resultType.getTypeName(),
- ArrayUtils.addAll(resultType.getFieldNames(), fieldType1Copy.getFieldNames()[i]),
- combinedFieldTypes, resultType.isOpen());
+ ArrayUtils.addAll(resultType.getFieldNames(), fieldType1Copy.getFieldNames()[i]),
+ combinedFieldTypes, resultType.isOpen());
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordRemoveFieldsTypeComputer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordRemoveFieldsTypeComputer.java
index 3bd64d6..e0b10d1 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordRemoveFieldsTypeComputer.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/RecordRemoveFieldsTypeComputer.java
@@ -128,8 +128,7 @@
}
private void computeTypeFromNonConstantExpression(String funcName, ILogicalExpression expression,
- Set<String> fieldNameSet,
- List<List<String>> pathList) throws AlgebricksException {
+ Set<String> fieldNameSet, List<List<String>> pathList) throws AlgebricksException {
AbstractFunctionCallExpression funcExp = (AbstractFunctionCallExpression) expression;
List<Mutable<ILogicalExpression>> args = funcExp.getArguments();
@@ -188,8 +187,8 @@
private boolean setFieldNameSet(ILogicalExpression expr, Set<String> fieldNameSet) {
if (expr.getExpressionTag() == LogicalExpressionTag.CONSTANT) {
- AOrderedList orderedList = (AOrderedList) (((AsterixConstantValue) ((ConstantExpression) expr).getValue())
- .getObject());
+ AOrderedList orderedList =
+ (AOrderedList) (((AsterixConstantValue) ((ConstantExpression) expr).getValue()).getObject());
for (int i = 0; i < orderedList.size(); i++) {
AString as = (AString) orderedList.getItem(i);
fieldNameSet.add(as.getStringValue());
@@ -320,8 +319,7 @@
destFieldTypes.toArray(new IAType[n]), isOpen);
}
- private static ARecordType getRecordTypeFromType(String funcName, IAType type0)
- throws AlgebricksException {
+ private static ARecordType getRecordTypeFromType(String funcName, IAType type0) throws AlgebricksException {
switch (type0.getTypeTag()) {
case OBJECT:
return (ARecordType) type0;
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/SubsetCollectionTypeComputer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/SubsetCollectionTypeComputer.java
index 7867cf6..0ab49d0 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/SubsetCollectionTypeComputer.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/typecomputer/impl/SubsetCollectionTypeComputer.java
@@ -55,8 +55,7 @@
case UNION: {
AUnionType ut = (AUnionType) t;
if (!ut.isUnknownableType()) {
- throw new TypeMismatchException(funcName, 0, actualTypeTag, ATypeTag.MULTISET,
- ATypeTag.ARRAY);
+ throw new TypeMismatchException(funcName, 0, actualTypeTag, ATypeTag.MULTISET, ATypeTag.ARRAY);
}
IAType t2 = ut.getActualType();
ATypeTag tag2 = t2.getTypeTag();
@@ -64,14 +63,12 @@
AbstractCollectionType act = (AbstractCollectionType) t2;
return act.getItemType();
}
- throw new TypeMismatchException(funcName, 0, actualTypeTag, ATypeTag.MULTISET,
- ATypeTag.ARRAY);
+ throw new TypeMismatchException(funcName, 0, actualTypeTag, ATypeTag.MULTISET, ATypeTag.ARRAY);
}
case ANY:
return BuiltinType.ANY;
default:
- throw new TypeMismatchException(funcName, 0, actualTypeTag, ATypeTag.MULTISET,
- ATypeTag.ARRAY);
+ throw new TypeMismatchException(funcName, 0, actualTypeTag, ATypeTag.MULTISET, ATypeTag.ARRAY);
}
}
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/AOrderedListType.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/AOrderedListType.java
index 1c1bdbc..7cb383f 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/AOrderedListType.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/AOrderedListType.java
@@ -77,7 +77,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", AOrderedListType.class.getName());
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/ATypeTag.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/ATypeTag.java
index 6889934..825ed70 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/ATypeTag.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/ATypeTag.java
@@ -130,8 +130,7 @@
}
public boolean isDerivedType() {
- return this == ATypeTag.OBJECT || this == ATypeTag.ARRAY || this == ATypeTag.MULTISET
- || this == ATypeTag.UNION;
+ return this == ATypeTag.OBJECT || this == ATypeTag.ARRAY || this == ATypeTag.MULTISET || this == ATypeTag.UNION;
}
@Override
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/AUnionType.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/AUnionType.java
index ace3b6c..007f072 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/AUnionType.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/AUnionType.java
@@ -196,7 +196,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", AUnionType.class.getName());
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/AUnorderedListType.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/AUnorderedListType.java
index 6de918a..8d088f7 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/AUnorderedListType.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/AUnorderedListType.java
@@ -27,7 +27,7 @@
private static final long serialVersionUID = 1L;
- public static final AUnorderedListType FULLY_OPEN_UNORDEREDLIST_TYPE = new AUnorderedListType(null,"");
+ public static final AUnorderedListType FULLY_OPEN_UNORDEREDLIST_TYPE = new AUnorderedListType(null, "");
/**
* @param itemType
@@ -77,7 +77,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode type = om.createObjectNode();
type.put("type", AUnorderedListType.class.getName());
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/EnumDeserializer.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/EnumDeserializer.java
index 8ec8989..05f265c 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/EnumDeserializer.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/EnumDeserializer.java
@@ -23,8 +23,8 @@
public class EnumDeserializer<E extends Enum<E> & IEnumSerializer> {
- public static final EnumDeserializer<ATypeTag> ATYPETAGDESERIALIZER = new EnumDeserializer<ATypeTag>(
- ATypeTag.class);
+ public static final EnumDeserializer<ATypeTag> ATYPETAGDESERIALIZER =
+ new EnumDeserializer<ATypeTag>(ATypeTag.class);
private Map<Byte, E> enumvalMap = new HashMap<Byte, E>();
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/runtime/RuntimeRecordTypeInfo.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/runtime/RuntimeRecordTypeInfo.java
index a34bd40..55108a1 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/runtime/RuntimeRecordTypeInfo.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/types/runtime/RuntimeRecordTypeInfo.java
@@ -53,10 +53,10 @@
private ARecordType cachedRecType = null;
public RuntimeRecordTypeInfo() {
- fieldNameComparator = new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY)
- .createBinaryComparator();
- fieldNameHashFunction = new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY)
- .createBinaryHashFunction();
+ fieldNameComparator =
+ new PointableBinaryComparatorFactory(UTF8StringPointable.FACTORY).createBinaryComparator();
+ fieldNameHashFunction =
+ new PointableBinaryHashFunctionFactory(UTF8StringPointable.FACTORY).createBinaryHashFunction();
writer = new UTF8StringWriter();
}
@@ -88,8 +88,8 @@
serializedFieldNameOffsets[i] = baaos.size();
writer.writeUTF8(fieldNames[i], dos);
length = baaos.size() - serializedFieldNameOffsets[i];
- hashCodeIndexPairs[i] = fieldNameHashFunction.hash(baaos.getByteArray(),
- serializedFieldNameOffsets[i], length);
+ hashCodeIndexPairs[i] =
+ fieldNameHashFunction.hash(baaos.getByteArray(), serializedFieldNameOffsets[i], length);
hashCodeIndexPairs[i] = hashCodeIndexPairs[i] << 32;
hashCodeIndexPairs[i] = hashCodeIndexPairs[i] | i;
}
diff --git a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/utils/RecordUtil.java b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/utils/RecordUtil.java
index e4a057f..ec1c7cb 100644
--- a/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/utils/RecordUtil.java
+++ b/asterixdb/asterix-om/src/main/java/org/apache/asterix/om/utils/RecordUtil.java
@@ -73,7 +73,6 @@
*/
public static int computeNullBitmapSize(ARecordType recordType) {
return NonTaggedFormatUtil.hasOptionalField(recordType)
- ? (int) Math.ceil(recordType.getFieldNames().length / 4.0)
- : 0;
+ ? (int) Math.ceil(recordType.getFieldNames().length / 4.0) : 0;
}
}
diff --git a/asterixdb/asterix-om/src/test/java/org/apache/asterix/dataflow/data/nontagged/serde/SerializerDeserializerTestUtils.java b/asterixdb/asterix-om/src/test/java/org/apache/asterix/dataflow/data/nontagged/serde/SerializerDeserializerTestUtils.java
index 4d41092..68923a1 100644
--- a/asterixdb/asterix-om/src/test/java/org/apache/asterix/dataflow/data/nontagged/serde/SerializerDeserializerTestUtils.java
+++ b/asterixdb/asterix-om/src/test/java/org/apache/asterix/dataflow/data/nontagged/serde/SerializerDeserializerTestUtils.java
@@ -64,19 +64,19 @@
ARecord addr12 = new ARecord(addrRecordType,
new IAObject[] { new AString("210 University Drive"), new AString("Philadelphia"), new AString("PA"),
new AInt16((short) 10086), new AInterval(100, 300, (byte) 0) });
- ARecord addr21 = new ARecord(addrRecordType,
- new IAObject[] { new AString("1 College Street"), new AString("Seattle"), new AString("WA"),
- new AInt16((short) 20012), new AInterval(400, 500, (byte) 0) });
- ARecord addr22 = new ARecord(addrRecordType,
- new IAObject[] { new AString("20 Lindsay Avenue"), new AString("Columbus"), new AString("OH"),
- new AInt16((short) 30120), new AInterval(600, 900, (byte) 0) });
- ARecord addr31 = new ARecord(addrRecordType,
- new IAObject[] { new AString("200 14th Avenue"), new AString("Long Island"), new AString("NY"),
- new AInt16((short) 95011), new AInterval(12000, 14000, (byte) 0) });
+ ARecord addr21 =
+ new ARecord(addrRecordType, new IAObject[] { new AString("1 College Street"), new AString("Seattle"),
+ new AString("WA"), new AInt16((short) 20012), new AInterval(400, 500, (byte) 0) });
+ ARecord addr22 =
+ new ARecord(addrRecordType, new IAObject[] { new AString("20 Lindsay Avenue"), new AString("Columbus"),
+ new AString("OH"), new AInt16((short) 30120), new AInterval(600, 900, (byte) 0) });
+ ARecord addr31 =
+ new ARecord(addrRecordType, new IAObject[] { new AString("200 14th Avenue"), new AString("Long Island"),
+ new AString("NY"), new AInt16((short) 95011), new AInterval(12000, 14000, (byte) 0) });
// With nested open field addr31.
- ARecord addr32 = new ARecord(addrRecordType,
- new IAObject[] { new AString("51 8th Street"), new AString("Orlando"), new AString("FL"),
- new AInt16((short) 49045), new AInterval(190000, 200000, (byte) 0) });
+ ARecord addr32 =
+ new ARecord(addrRecordType, new IAObject[] { new AString("51 8th Street"), new AString("Orlando"),
+ new AString("FL"), new AInt16((short) 49045), new AInterval(190000, 200000, (byte) 0) });
ARecord record1 = new ARecord(employeeType, new IAObject[] { new AInt64(0L), new AString("Tom"),
new AOrderedList(addrListType, Arrays.asList(new IAObject[] { addr11, addr12 })) });
diff --git a/asterixdb/asterix-om/src/test/java/org/apache/asterix/dataflow/data/nontagged/serde/SimpleSerializerDeserializerTest.java b/asterixdb/asterix-om/src/test/java/org/apache/asterix/dataflow/data/nontagged/serde/SimpleSerializerDeserializerTest.java
index 8a5270f..a661f64 100644
--- a/asterixdb/asterix-om/src/test/java/org/apache/asterix/dataflow/data/nontagged/serde/SimpleSerializerDeserializerTest.java
+++ b/asterixdb/asterix-om/src/test/java/org/apache/asterix/dataflow/data/nontagged/serde/SimpleSerializerDeserializerTest.java
@@ -32,8 +32,8 @@
@Test
public void test() {
Reflections reflections = new Reflections("org.apache.asterix.dataflow.data.nontagged.serde");
- Set<Class<? extends ISerializerDeserializer>> allClasses = reflections
- .getSubTypesOf(ISerializerDeserializer.class);
+ Set<Class<? extends ISerializerDeserializer>> allClasses =
+ reflections.getSubTypesOf(ISerializerDeserializer.class);
for (Class<? extends ISerializerDeserializer> cl : allClasses) {
String className = cl.getName();
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/logging/RemoteLogsNotifier.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/logging/RemoteLogsNotifier.java
index d63496f..366abce 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/logging/RemoteLogsNotifier.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/logging/RemoteLogsNotifier.java
@@ -66,8 +66,8 @@
case LogType.ABORT:
// send ACK to requester
logRecord.getReplicationWorker().getChannel().socket().getOutputStream()
- .write((nodeId + ReplicationProtocol.LOG_REPLICATION_ACK + logRecord.getTxnId() + System
- .lineSeparator()).getBytes());
+ .write((nodeId + ReplicationProtocol.LOG_REPLICATION_ACK + logRecord.getTxnId()
+ + System.lineSeparator()).getBytes());
break;
case LogType.FLUSH:
checkpointReplicaIndexes(logRecord, logRecord.getDatasetId());
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/LogReplicationManager.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/LogReplicationManager.java
index b28c2f7..7a9d75f 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/LogReplicationManager.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/management/LogReplicationManager.java
@@ -217,9 +217,8 @@
}
LOGGER.error("Replica failed", e);
failedSockets.add(replicaSocket);
- Optional<ReplicationDestination> socketDest =
- destinations.entrySet().stream().filter(entry -> entry.getValue().equals(replicaSocket))
- .map(Map.Entry::getKey).findFirst();
+ Optional<ReplicationDestination> socketDest = destinations.entrySet().stream()
+ .filter(entry -> entry.getValue().equals(replicaSocket)).map(Map.Entry::getKey).findFirst();
socketDest.ifPresent(dest -> replicationManager.notifyFailure(dest, e));
}
@@ -236,8 +235,8 @@
public void run() {
Thread.currentThread().setName("TxnAckListener (" + dest + ")");
LOGGER.info("Started listening on socket: {}", dest);
- try (BufferedReader incomingResponse = new BufferedReader(
- new InputStreamReader(replicaSocket.socket().getInputStream()))) {
+ try (BufferedReader incomingResponse =
+ new BufferedReader(new InputStreamReader(replicaSocket.socket().getInputStream()))) {
while (true) {
final String response = incomingResponse.readLine();
if (response == null) {
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/ReplicateFileTask.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/ReplicateFileTask.java
index 99c7256..ca0fcca 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/ReplicateFileTask.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/messaging/ReplicateFileTask.java
@@ -74,8 +74,8 @@
// receive actual file
final Path filePath = Paths.get(resourceDir.toString(), localPath.getFile().getName());
Files.createFile(filePath);
- try (RandomAccessFile fileOutputStream = new RandomAccessFile(filePath.toFile(),
- "rw"); FileChannel fileChannel = fileOutputStream.getChannel()) {
+ try (RandomAccessFile fileOutputStream = new RandomAccessFile(filePath.toFile(), "rw");
+ FileChannel fileChannel = fileOutputStream.getChannel()) {
fileOutputStream.setLength(size);
NetworkingUtil.downloadFile(fileChannel, worker.getChannel());
fileChannel.force(true);
diff --git a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/sync/FileSynchronizer.java b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/sync/FileSynchronizer.java
index e1649b3..cc0f7b4 100644
--- a/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/sync/FileSynchronizer.java
+++ b/asterixdb/asterix-replication/src/main/java/org/apache/asterix/replication/sync/FileSynchronizer.java
@@ -55,8 +55,8 @@
ReplicateFileTask task = new ReplicateFileTask(file, filePath.getFile().length(), metadata);
ReplicationProtocol.sendTo(replica, task);
// send the file itself
- try (RandomAccessFile fromFile = new RandomAccessFile(filePath.getFile(),
- "r"); FileChannel fileChannel = fromFile.getChannel()) {
+ try (RandomAccessFile fromFile = new RandomAccessFile(filePath.getFile(), "r");
+ FileChannel fileChannel = fromFile.getChannel()) {
NetworkingUtil.sendFile(fileChannel, channel);
}
ReplicationProtocol.waitForAck(replica);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/AbstractScalarAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/AbstractScalarAggregateDescriptor.java
index 643098f..e929f0f 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/AbstractScalarAggregateDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/scalar/AbstractScalarAggregateDescriptor.java
@@ -55,8 +55,8 @@
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
// Use ScanCollection to iterate over list items.
- ScanCollectionUnnestingFunctionFactory scanCollectionFactory = new ScanCollectionUnnestingFunctionFactory(
- args[0]);
+ ScanCollectionUnnestingFunctionFactory scanCollectionFactory =
+ new ScanCollectionUnnestingFunctionFactory(args[0]);
return new GenericScalarAggregateFunction(aggFuncFactory.createAggregateEvaluator(ctx),
scanCollectionFactory, ctx);
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java
index 7fc264b..db12b85 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableAvgAggregateFunction.java
@@ -81,14 +81,14 @@
private ClosedRecordConstructorEval recordEval;
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADouble> doubleSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
+ private ISerializerDeserializer<ADouble> doubleSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> longSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> longSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ANull> nullSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
+ private ISerializerDeserializer<ANull> nullSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);
public AbstractSerializableAvgAggregateFunction(IScalarEvaluatorFactory[] args, IHyracksTaskContext context)
throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableCountAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableCountAggregateFunction.java
index b010735..9a7cc98 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableCountAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/AbstractSerializableCountAggregateFunction.java
@@ -47,11 +47,11 @@
private AMutableInt64 result = new AMutableInt64(-1);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ANull> nullSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
+ private ISerializerDeserializer<ANull> nullSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);
private IPointable inputVal = new VoidPointable();
private IScalarEvaluator eval;
@@ -75,8 +75,8 @@
boolean metNull = BufferSerDeUtil.getBoolean(state, start);
long cnt = BufferSerDeUtil.getLong(state, start + 1);
eval.evaluate(tuple, inputVal);
- ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(inputVal.getByteArray()[inputVal.getStartOffset()]);
+ ATypeTag typeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[inputVal.getStartOffset()]);
if (typeTag == ATypeTag.MISSING || typeTag == ATypeTag.NULL) {
processNull(state, start);
} else {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlSumAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlSumAggregateFunction.java
index 4c42f98..c42c350 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlSumAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSqlSumAggregateFunction.java
@@ -50,8 +50,7 @@
// but if all input value are system null, then we should return
// null in finish().
if (isLocalAgg) {
- throw new UnsupportedItemTypeException(BuiltinFunctions.SQL_SUM,
- ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
+ throw new UnsupportedItemTypeException(BuiltinFunctions.SQL_SUM, ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
}
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateFunction.java
index c4f85a2..60e34fa 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/serializable/std/SerializableSumAggregateFunction.java
@@ -58,8 +58,7 @@
// but if all input value are system null, then we should return
// null in finish().
if (isLocalAgg) {
- throw new UnsupportedItemTypeException(BuiltinFunctions.SUM,
- ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
+ throw new UnsupportedItemTypeException(BuiltinFunctions.SUM, ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
}
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java
index 48eb1de..bf45c0b 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractAvgAggregateFunction.java
@@ -84,14 +84,14 @@
private ClosedRecordConstructorEval recordEval;
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADouble> doubleSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
+ private ISerializerDeserializer<ADouble> doubleSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> longSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> longSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ANull> nullSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
+ private ISerializerDeserializer<ANull> nullSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);
public AbstractAvgAggregateFunction(IScalarEvaluatorFactory[] args, IHyracksTaskContext context)
throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractCountAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractCountAggregateFunction.java
index bd0243b..38e8d8b 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractCountAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractCountAggregateFunction.java
@@ -43,8 +43,8 @@
public abstract class AbstractCountAggregateFunction implements IAggregateEvaluator {
private AMutableInt64 result = new AMutableInt64(-1);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private IPointable inputVal = new VoidPointable();
private IScalarEvaluator eval;
protected long cnt;
@@ -64,8 +64,8 @@
@Override
public void step(IFrameTupleReference tuple) throws HyracksDataException {
eval.evaluate(tuple, inputVal);
- ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(inputVal.getByteArray()[inputVal.getStartOffset()]);
+ ATypeTag typeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[inputVal.getStartOffset()]);
// Ignore SYSTEM_NULL.
if (typeTag == ATypeTag.NULL || typeTag == ATypeTag.MISSING) {
processNull();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractMinMaxAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractMinMaxAggregateFunction.java
index b7c70aa..32c40e6 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractMinMaxAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/AbstractMinMaxAggregateFunction.java
@@ -68,8 +68,8 @@
return;
}
eval.evaluate(tuple, inputVal);
- ATypeTag typeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(inputVal.getByteArray()[inputVal.getStartOffset()]);
+ ATypeTag typeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(inputVal.getByteArray()[inputVal.getStartOffset()]);
if (typeTag == ATypeTag.MISSING || typeTag == ATypeTag.NULL) {
processNull();
return;
@@ -81,8 +81,8 @@
// First value encountered. Set type, comparator, and initial value.
aggType = typeTag;
// Set comparator.
- IBinaryComparatorFactory cmpFactory = BinaryComparatorFactoryProvider.INSTANCE
- .getBinaryComparatorFactory(aggType, isMin);
+ IBinaryComparatorFactory cmpFactory =
+ BinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(aggType, isMin);
cmp = cmpFactory.createBinaryComparator();
// Initialize min value.
outputVal.assign(inputVal);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlSumAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlSumAggregateFunction.java
index 09e8822..663ed69 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlSumAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SqlSumAggregateFunction.java
@@ -49,8 +49,7 @@
// but if all input value are system null, then we should return
// null in finish().
if (isLocalAgg) {
- throw new UnsupportedItemTypeException(BuiltinFunctions.SQL_SUM,
- ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
+ throw new UnsupportedItemTypeException(BuiltinFunctions.SQL_SUM, ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
}
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SumAggregateFunction.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SumAggregateFunction.java
index d406846..2b3beef 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SumAggregateFunction.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/std/SumAggregateFunction.java
@@ -55,8 +55,7 @@
// but if all input value are system null, then we should return
// null in finish().
if (isLocalAgg) {
- throw new UnsupportedItemTypeException(BuiltinFunctions.SUM,
- ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
+ throw new UnsupportedItemTypeException(BuiltinFunctions.SUM, ATypeTag.SERIALIZED_SYSTEM_NULL_TYPE_TAG);
}
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/stream/EmptyStreamAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/stream/EmptyStreamAggregateDescriptor.java
index 66d81f6..dc2b355 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/stream/EmptyStreamAggregateDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/stream/EmptyStreamAggregateDescriptor.java
@@ -62,8 +62,8 @@
private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
@SuppressWarnings("rawtypes")
- private ISerializerDeserializer serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ private ISerializerDeserializer serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
boolean res = true;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/stream/NonEmptyStreamAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/stream/NonEmptyStreamAggregateDescriptor.java
index 2c8f409..e9330d3 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/stream/NonEmptyStreamAggregateDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/aggregates/stream/NonEmptyStreamAggregateDescriptor.java
@@ -60,8 +60,8 @@
private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
@SuppressWarnings("rawtypes")
- private ISerializerDeserializer serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ private ISerializerDeserializer serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
boolean res = false;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleCenterAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleCenterAccessor.java
index 857ac2e..e10bd07 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleCenterAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleCenterAccessor.java
@@ -72,8 +72,8 @@
private final IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);
private final AMutablePoint aPoint = new AMutablePoint(0, 0);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<APoint> pointSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.APOINT);
+ private final ISerializerDeserializer<APoint> pointSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.APOINT);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleRadiusAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleRadiusAccessor.java
index fcc0b0a..2fc24e4 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleRadiusAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/CircleRadiusAccessor.java
@@ -70,8 +70,8 @@
private final IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);
private final AMutableDouble aDouble = new AMutableDouble(0);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ADouble> doubleSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
+ private final ISerializerDeserializer<ADouble> doubleSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/LineRectanglePolygonAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/LineRectanglePolygonAccessor.java
index 7330cc1..66de80a 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/LineRectanglePolygonAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/LineRectanglePolygonAccessor.java
@@ -80,8 +80,8 @@
private final AOrderedListType pointListType = new AOrderedListType(BuiltinType.APOINT, null);
private final AMutablePoint aPoint = new AMutablePoint(0, 0);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<APoint> pointSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.APOINT);
+ private final ISerializerDeserializer<APoint> pointSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.APOINT);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointXCoordinateAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointXCoordinateAccessor.java
index a90e6c63..0aa38f3 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointXCoordinateAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointXCoordinateAccessor.java
@@ -70,8 +70,8 @@
private final IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);
private final AMutableDouble aDouble = new AMutableDouble(0);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ADouble> doubleSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
+ private final ISerializerDeserializer<ADouble> doubleSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointYCoordinateAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointYCoordinateAccessor.java
index 5a0840c..cfaa5d5 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointYCoordinateAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/PointYCoordinateAccessor.java
@@ -74,8 +74,8 @@
private final IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);
private final AMutableDouble aDouble = new AMutableDouble(0);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ADouble> doubleSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
+ private final ISerializerDeserializer<ADouble> doubleSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
index c19acde..4e62659 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalDayAccessor.java
@@ -79,8 +79,8 @@
// for output: type integer
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AInt64> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private final ISerializerDeserializer<AInt64> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private final AMutableInt64 aMutableInt64 = new AMutableInt64(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
index 08bb71d..7ba57f9 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalHourAccessor.java
@@ -81,8 +81,8 @@
// for output: type integer
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AInt64> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private final ISerializerDeserializer<AInt64> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private final AMutableInt64 aMutableInt64 = new AMutableInt64(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndAccessor.java
index d3ac99b..0d5d01c 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndAccessor.java
@@ -75,16 +75,16 @@
// possible output
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ADate> dateSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATE);
+ private final ISerializerDeserializer<ADate> dateSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATE);
private final AMutableDate aDate = new AMutableDate(0);
@SuppressWarnings("unchecked")
private final ISerializerDeserializer<ADateTime> datetimeSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
private final AMutableDateTime aDateTime = new AMutableDateTime(0);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ATime> timeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ATIME);
+ private final ISerializerDeserializer<ATime> timeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
private final AMutableTime aTime = new AMutableTime(0);
@Override
@@ -96,8 +96,8 @@
resultStorage.reset();
try {
if (bytes[startOffset] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
- byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes,
- startOffset + 1);
+ byte timeType =
+ AIntervalSerializerDeserializer.getIntervalTimeType(bytes, startOffset + 1);
long endTime = AIntervalSerializerDeserializer.getIntervalEnd(bytes, startOffset + 1);
if (timeType == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
aDate.setValue((int) (endTime));
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDateAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDateAccessor.java
index 5c888eb..b83cbd1 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDateAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDateAccessor.java
@@ -71,8 +71,8 @@
// possible output
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ADate> dateSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATE);
+ private final ISerializerDeserializer<ADate> dateSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATE);
private final AMutableDate aDate = new AMutableDate(0);
@Override
@@ -84,8 +84,8 @@
resultStorage.reset();
try {
if (bytes[startOffset] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
- byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes,
- startOffset + 1);
+ byte timeType =
+ AIntervalSerializerDeserializer.getIntervalTimeType(bytes, startOffset + 1);
long endTime = AIntervalSerializerDeserializer.getIntervalEnd(bytes, startOffset + 1);
if (timeType == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
aDate.setValue((int) (endTime));
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDatetimeAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDatetimeAccessor.java
index 66e6e02..c806db1 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDatetimeAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndDatetimeAccessor.java
@@ -86,8 +86,8 @@
resultStorage.reset();
try {
if (bytes[startOffset] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
- byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes,
- startOffset + 1);
+ byte timeType =
+ AIntervalSerializerDeserializer.getIntervalTimeType(bytes, startOffset + 1);
long endTime = AIntervalSerializerDeserializer.getIntervalEnd(bytes, startOffset + 1);
if (timeType == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
aDateTime.setValue(endTime);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndTimeAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndTimeAccessor.java
index 833079b..8131385 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndTimeAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalEndTimeAccessor.java
@@ -70,8 +70,8 @@
// possible output
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ATime> timeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ATIME);
+ private final ISerializerDeserializer<ATime> timeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
private final AMutableTime aTime = new AMutableTime(0);
@Override
@@ -83,8 +83,8 @@
resultStorage.reset();
try {
if (bytes[startOffset] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
- byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes,
- startOffset + 1);
+ byte timeType =
+ AIntervalSerializerDeserializer.getIntervalTimeType(bytes, startOffset + 1);
long endTime = AIntervalSerializerDeserializer.getIntervalEnd(bytes, startOffset + 1);
if (timeType == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
aTime.setValue((int) (endTime));
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartAccessor.java
index 0c9639f..04da6d2 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartAccessor.java
@@ -75,16 +75,16 @@
// possible output
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ADate> dateSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATE);
+ private final ISerializerDeserializer<ADate> dateSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATE);
private final AMutableDate aDate = new AMutableDate(0);
@SuppressWarnings("unchecked")
private final ISerializerDeserializer<ADateTime> datetimeSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
private final AMutableDateTime aDateTime = new AMutableDateTime(0);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ATime> timeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ATIME);
+ private final ISerializerDeserializer<ATime> timeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
private final AMutableTime aTime = new AMutableTime(0);
@Override
@@ -96,10 +96,10 @@
resultStorage.reset();
try {
if (bytes[startOffset] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
- byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes,
- startOffset + 1);
- long startTime = AIntervalSerializerDeserializer.getIntervalStart(bytes,
- startOffset + 1);
+ byte timeType =
+ AIntervalSerializerDeserializer.getIntervalTimeType(bytes, startOffset + 1);
+ long startTime =
+ AIntervalSerializerDeserializer.getIntervalStart(bytes, startOffset + 1);
if (timeType == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
aDate.setValue((int) (startTime));
dateSerde.serialize(aDate, out);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDateAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDateAccessor.java
index f3a6dfd..971fd13 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDateAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDateAccessor.java
@@ -73,8 +73,8 @@
// possible output
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ADate> dateSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATE);
+ private final ISerializerDeserializer<ADate> dateSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATE);
private final AMutableDate aDate = new AMutableDate(0);
@Override
@@ -86,10 +86,10 @@
resultStorage.reset();
try {
if (bytes[startOffset] == SERIALIZED_INTERVAL_TYPE_TAG) {
- byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes,
- startOffset + 1);
- long startTime = AIntervalSerializerDeserializer.getIntervalStart(bytes,
- startOffset + 1);
+ byte timeType =
+ AIntervalSerializerDeserializer.getIntervalTimeType(bytes, startOffset + 1);
+ long startTime =
+ AIntervalSerializerDeserializer.getIntervalStart(bytes, startOffset + 1);
if (timeType == ATypeTag.SERIALIZED_DATE_TYPE_TAG) {
aDate.setValue((int) (startTime));
dateSerde.serialize(aDate, out);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDatetimeAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDatetimeAccessor.java
index 61aeaff..d785e90 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDatetimeAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartDatetimeAccessor.java
@@ -83,10 +83,10 @@
resultStorage.reset();
try {
if (bytes[startOffset] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
- byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes,
- startOffset + 1);
- long startTime = AIntervalSerializerDeserializer.getIntervalStart(bytes,
- startOffset + 1);
+ byte timeType =
+ AIntervalSerializerDeserializer.getIntervalTimeType(bytes, startOffset + 1);
+ long startTime =
+ AIntervalSerializerDeserializer.getIntervalStart(bytes, startOffset + 1);
if (timeType == ATypeTag.SERIALIZED_DATETIME_TYPE_TAG) {
aDateTime.setValue(startTime);
datetimeSerde.serialize(aDateTime, out);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartTimeAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartTimeAccessor.java
index 649f7b9..2075b95 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartTimeAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalIntervalStartTimeAccessor.java
@@ -71,8 +71,8 @@
// possible output
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ATime> timeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ATIME);
+ private final ISerializerDeserializer<ATime> timeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
private final AMutableTime aTime = new AMutableTime(0);
@Override
@@ -84,10 +84,10 @@
resultStorage.reset();
try {
if (bytes[startOffset] == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG) {
- byte timeType = AIntervalSerializerDeserializer.getIntervalTimeType(bytes,
- startOffset + 1);
- long startTime = AIntervalSerializerDeserializer.getIntervalStart(bytes,
- startOffset + 1);
+ byte timeType =
+ AIntervalSerializerDeserializer.getIntervalTimeType(bytes, startOffset + 1);
+ long startTime =
+ AIntervalSerializerDeserializer.getIntervalStart(bytes, startOffset + 1);
if (timeType == ATypeTag.SERIALIZED_TIME_TYPE_TAG) {
aTime.setValue((int) (startTime));
timeSerde.serialize(aTime, out);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
index 9777d0a..292f410 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMillisecondAccessor.java
@@ -79,8 +79,8 @@
// for output: type integer
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AInt64> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private final ISerializerDeserializer<AInt64> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private final AMutableInt64 aMutableInt64 = new AMutableInt64(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
index eb264e1..a92344b 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMinuteAccessor.java
@@ -82,8 +82,8 @@
// for output: type integer
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AInt64> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private final ISerializerDeserializer<AInt64> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private final AMutableInt64 aMutableInt64 = new AMutableInt64(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
index da8333b..bbdc9a0 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalMonthAccessor.java
@@ -79,8 +79,8 @@
// for output: type integer
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AInt64> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private final ISerializerDeserializer<AInt64> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private final AMutableInt64 aMutableInt64 = new AMutableInt64(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
index f8c3816..ecee642 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalSecondAccessor.java
@@ -79,8 +79,8 @@
// for output: type integer
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AInt64> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private final ISerializerDeserializer<AInt64> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private final AMutableInt64 aMutableInt64 = new AMutableInt64(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
index 37ac977..49d841c 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/accessors/TemporalYearAccessor.java
@@ -80,8 +80,8 @@
// for output: type integer
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AInt64> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private final ISerializerDeserializer<AInt64> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private final AMutableInt64 aMutableInt64 = new AMutableInt64(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
index 0782f06..32196e5 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/CreateMBREvalFactory.java
@@ -327,10 +327,10 @@
}
break;
default:
- throw new TypeMismatchException(BuiltinFunctions.CREATE_MBR, 0,
- data0[startOffset0], ATypeTag.SERIALIZED_POINT_TYPE_TAG,
- ATypeTag.SERIALIZED_LINE_TYPE_TAG, ATypeTag.SERIALIZED_POLYGON_TYPE_TAG,
- ATypeTag.SERIALIZED_CIRCLE_TYPE_TAG, ATypeTag.SERIALIZED_RECTANGLE_TYPE_TAG);
+ throw new TypeMismatchException(BuiltinFunctions.CREATE_MBR, 0, data0[startOffset0],
+ ATypeTag.SERIALIZED_POINT_TYPE_TAG, ATypeTag.SERIALIZED_LINE_TYPE_TAG,
+ ATypeTag.SERIALIZED_POLYGON_TYPE_TAG, ATypeTag.SERIALIZED_CIRCLE_TYPE_TAG,
+ ATypeTag.SERIALIZED_RECTANGLE_TYPE_TAG);
}
} else {
throw new NotImplementedException(dimension + "D is not supported");
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/EditDistanceCheckEvaluator.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/EditDistanceCheckEvaluator.java
index b6e9ae9..dece292 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/EditDistanceCheckEvaluator.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/EditDistanceCheckEvaluator.java
@@ -50,8 +50,8 @@
protected final OrderedListBuilder listBuilder;
protected ArrayBackedValueStorage listItemVal;
@SuppressWarnings("unchecked")
- protected final ISerializerDeserializer<ABoolean> booleanSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ protected final ISerializerDeserializer<ABoolean> booleanSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
public EditDistanceCheckEvaluator(IScalarEvaluatorFactory[] args, IHyracksTaskContext context)
throws HyracksDataException {
@@ -65,11 +65,11 @@
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
resultStorage.reset();
firstStringEval.evaluate(tuple, argPtr1);
- firstTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(argPtr1.getByteArray()[argPtr1.getStartOffset()]);
+ firstTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argPtr1.getByteArray()[argPtr1.getStartOffset()]);
secondStringEval.evaluate(tuple, argPtr2);
- secondTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(argPtr2.getByteArray()[argPtr2.getStartOffset()]);
+ secondTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argPtr2.getByteArray()[argPtr2.getStartOffset()]);
edThreshEval.evaluate(tuple, argPtrThreshold);
if (!checkArgTypes(firstTypeTag, secondTypeTag)) {
@@ -92,8 +92,7 @@
}
@Override
- protected int computeResult(IPointable left, IPointable right, ATypeTag argType)
- throws HyracksDataException {
+ protected int computeResult(IPointable left, IPointable right, ATypeTag argType) throws HyracksDataException {
byte[] leftBytes = left.getByteArray();
int leftStartOffset = left.getStartOffset();
byte[] rightBytes = right.getByteArray();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/EditDistanceEvaluator.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/EditDistanceEvaluator.java
index 3df6e83..85fd334 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/EditDistanceEvaluator.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/EditDistanceEvaluator.java
@@ -59,8 +59,8 @@
protected int editDistance = 0;
protected final AMutableInt64 aInt64 = new AMutableInt64(-1);
@SuppressWarnings("unchecked")
- protected final ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ protected final ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
protected ATypeTag itemTypeTag;
protected ATypeTag firstTypeTag;
@@ -76,11 +76,11 @@
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
resultStorage.reset();
firstStringEval.evaluate(tuple, argPtr1);
- firstTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(argPtr1.getByteArray()[argPtr1.getStartOffset()]);
+ firstTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argPtr1.getByteArray()[argPtr1.getStartOffset()]);
secondStringEval.evaluate(tuple, argPtr2);
- secondTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(argPtr2.getByteArray()[argPtr2.getStartOffset()]);
+ secondTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argPtr2.getByteArray()[argPtr2.getStartOffset()]);
if (!checkArgTypes(firstTypeTag, secondTypeTag)) {
result.set(resultStorage);
@@ -96,8 +96,7 @@
result.set(resultStorage);
}
- protected int computeResult(IPointable left, IPointable right, ATypeTag argType)
- throws HyracksDataException {
+ protected int computeResult(IPointable left, IPointable right, ATypeTag argType) throws HyracksDataException {
byte[] leftBytes = left.getByteArray();
int leftStartOffset = left.getStartOffset();
byte[] rightBytes = right.getByteArray();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/FullTextContainsEvaluator.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/FullTextContainsEvaluator.java
index b93d613..3bd2587 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/FullTextContainsEvaluator.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/FullTextContainsEvaluator.java
@@ -57,10 +57,10 @@
protected final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
protected final DataOutput out = resultStorage.getDataOutput();
- protected final TaggedValuePointable argLeft = (TaggedValuePointable) TaggedValuePointable.FACTORY
- .createPointable();
- protected final TaggedValuePointable argRight = (TaggedValuePointable) TaggedValuePointable.FACTORY
- .createPointable();
+ protected final TaggedValuePointable argLeft =
+ (TaggedValuePointable) TaggedValuePointable.FACTORY.createPointable();
+ protected final TaggedValuePointable argRight =
+ (TaggedValuePointable) TaggedValuePointable.FACTORY.createPointable();
protected TaggedValuePointable[] argOptions;
protected final IScalarEvaluator evalLeft;
protected final IScalarEvaluator evalRight;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/SimilarityFiltersCache.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/SimilarityFiltersCache.java
index 37b06af..4f5fb69 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/SimilarityFiltersCache.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/SimilarityFiltersCache.java
@@ -45,8 +45,8 @@
bbis.setByteBuffer(ByteBuffer.wrap(similarityNameBytes), startOffset + 1);
String similarityName = utf8SerDer.deserialize(dis);
similarityNameBytesCached = Arrays.copyOfRange(similarityNameBytes, startOffset, len);
- similarityFiltersCached = SimilarityFiltersFactory.getSimilarityFilters(similarityName,
- similarityThreshold);
+ similarityFiltersCached =
+ SimilarityFiltersFactory.getSimilarityFilters(similarityName, similarityThreshold);
}
return similarityFiltersCached;
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/SimilarityJaccardCheckEvaluator.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/SimilarityJaccardCheckEvaluator.java
index 4f7a30f..60b5592 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/SimilarityJaccardCheckEvaluator.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/SimilarityJaccardCheckEvaluator.java
@@ -48,8 +48,8 @@
protected OrderedListBuilder listBuilder;
protected ArrayBackedValueStorage inputVal;
@SuppressWarnings("unchecked")
- protected final ISerializerDeserializer<ABoolean> booleanSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ protected final ISerializerDeserializer<ABoolean> booleanSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
protected final AOrderedListType listType = new AOrderedListType(BuiltinType.ANY, "list");
public SimilarityJaccardCheckEvaluator(IScalarEvaluatorFactory[] args, IHyracksTaskContext context)
@@ -68,15 +68,15 @@
secondOrdListEval.evaluate(tuple, argPtr2);
jaccThreshEval.evaluate(tuple, jaccThreshPointable);
- firstTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(argPtr1.getByteArray()[argPtr1.getStartOffset()]);
- secondTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(argPtr2.getByteArray()[argPtr2.getStartOffset()]);
+ firstTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argPtr1.getByteArray()[argPtr1.getStartOffset()]);
+ secondTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argPtr2.getByteArray()[argPtr2.getStartOffset()]);
- firstItemTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(argPtr1.getByteArray()[argPtr1.getStartOffset() + 1]);
- secondItemTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(argPtr2.getByteArray()[argPtr2.getStartOffset() + 1]);
+ firstItemTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argPtr1.getByteArray()[argPtr1.getStartOffset() + 1]);
+ secondItemTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argPtr2.getByteArray()[argPtr2.getStartOffset() + 1]);
jaccThresh = AFloatSerializerDeserializer.getFloat(jaccThreshPointable.getByteArray(),
jaccThreshPointable.getStartOffset() + TYPE_INDICATOR_SIZE);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/SimilarityJaccardEvaluator.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/SimilarityJaccardEvaluator.java
index f69248d..1e5ad3c 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/SimilarityJaccardEvaluator.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/common/SimilarityJaccardEvaluator.java
@@ -73,8 +73,8 @@
protected final AMutableFloat aFloat = new AMutableFloat(0);
@SuppressWarnings("unchecked")
- protected final ISerializerDeserializer<AFloat> floatSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AFLOAT);
+ protected final ISerializerDeserializer<AFloat> floatSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AFLOAT);
protected ATypeTag firstTypeTag;
protected ATypeTag secondTypeTag;
@@ -107,15 +107,15 @@
firstOrdListEval.evaluate(tuple, argPtr1);
secondOrdListEval.evaluate(tuple, argPtr2);
- firstTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(argPtr1.getByteArray()[argPtr1.getStartOffset()]);
- secondTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(argPtr2.getByteArray()[argPtr2.getStartOffset()]);
+ firstTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argPtr1.getByteArray()[argPtr1.getStartOffset()]);
+ secondTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argPtr2.getByteArray()[argPtr2.getStartOffset()]);
- firstItemTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(argPtr1.getByteArray()[argPtr1.getStartOffset() + 1]);
- secondItemTypeTag = EnumDeserializer.ATYPETAGDESERIALIZER
- .deserialize(argPtr2.getByteArray()[argPtr2.getStartOffset() + 1]);
+ firstItemTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argPtr1.getByteArray()[argPtr1.getStartOffset() + 1]);
+ secondItemTypeTag =
+ EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(argPtr2.getByteArray()[argPtr2.getStartOffset() + 1]);
if (!checkArgTypes(firstTypeTag, secondTypeTag)) {
result.set(resultStorage);
@@ -229,10 +229,10 @@
return;
}
- IBinaryHashFunction putHashFunc = ListItemBinaryHashFunctionFactory.INSTANCE
- .createBinaryHashFunction(buildItemTypeTag, ignoreCase);
- IBinaryHashFunction getHashFunc = ListItemBinaryHashFunctionFactory.INSTANCE
- .createBinaryHashFunction(probeItemTypeTag, ignoreCase);
+ IBinaryHashFunction putHashFunc =
+ ListItemBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction(buildItemTypeTag, ignoreCase);
+ IBinaryHashFunction getHashFunc =
+ ListItemBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction(probeItemTypeTag, ignoreCase);
IBinaryComparator cmp = ListItemBinaryComparatorFactory.INSTANCE.createBinaryComparator(buildItemTypeTag,
probeItemTypeTag, ignoreCase);
hashMap = new BinaryHashMap(hashTableSize, TABLE_FRAME_SIZE, putHashFunc, getHashFunc, cmp);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/comparisons/AbstractComparisonEvaluator.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/comparisons/AbstractComparisonEvaluator.java
index a1e60a6..620c543 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/comparisons/AbstractComparisonEvaluator.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/comparisons/AbstractComparisonEvaluator.java
@@ -52,11 +52,11 @@
private ComparisonHelper ch = new ComparisonHelper();
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ABoolean> serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ protected ISerializerDeserializer<ABoolean> serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<ANull> nullSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
+ protected ISerializerDeserializer<ANull> nullSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);
public AbstractComparisonEvaluator(IScalarEvaluatorFactory evalLeftFactory,
IScalarEvaluatorFactory evalRightFactory, IHyracksTaskContext context) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/comparisons/ComparisonHelper.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/comparisons/ComparisonHelper.java
index e4aa4ad..163bd9f 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/comparisons/ComparisonHelper.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/comparisons/ComparisonHelper.java
@@ -51,28 +51,28 @@
private static final long serialVersionUID = 1L;
static final String COMPARISON = "comparison operations (>, >=, <, and <=)";
- private final IBinaryComparator strBinaryComp = BinaryComparatorFactoryProvider.UTF8STRING_POINTABLE_INSTANCE
- .createBinaryComparator();
- private final IBinaryComparator circleBinaryComp = ACirclePartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- private final IBinaryComparator durationBinaryComp = ADurationPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- private final IBinaryComparator intervalBinaryComp = AIntervalAscPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- private final IBinaryComparator lineBinaryComparator = ALinePartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- private final IBinaryComparator pointBinaryComparator = APointPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- private final IBinaryComparator point3DBinaryComparator = APoint3DPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- private final IBinaryComparator polygonBinaryComparator = APolygonPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- private final IBinaryComparator rectangleBinaryComparator = ARectanglePartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- private final IBinaryComparator uuidBinaryComparator = AUUIDPartialBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- private final IBinaryComparator byteArrayComparator = new PointableBinaryComparatorFactory(
- ByteArrayPointable.FACTORY).createBinaryComparator();
+ private final IBinaryComparator strBinaryComp =
+ BinaryComparatorFactoryProvider.UTF8STRING_POINTABLE_INSTANCE.createBinaryComparator();
+ private final IBinaryComparator circleBinaryComp =
+ ACirclePartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ private final IBinaryComparator durationBinaryComp =
+ ADurationPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ private final IBinaryComparator intervalBinaryComp =
+ AIntervalAscPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ private final IBinaryComparator lineBinaryComparator =
+ ALinePartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ private final IBinaryComparator pointBinaryComparator =
+ APointPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ private final IBinaryComparator point3DBinaryComparator =
+ APoint3DPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ private final IBinaryComparator polygonBinaryComparator =
+ APolygonPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ private final IBinaryComparator rectangleBinaryComparator =
+ ARectanglePartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ private final IBinaryComparator uuidBinaryComparator =
+ AUUIDPartialBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ private final IBinaryComparator byteArrayComparator =
+ new PointableBinaryComparatorFactory(ByteArrayPointable.FACTORY).createBinaryComparator();
public int compare(ATypeTag typeTag1, ATypeTag typeTag2, IPointable arg1, IPointable arg2)
throws HyracksDataException {
@@ -177,7 +177,7 @@
private int compareStringWithArg(ATypeTag typeTag2, IPointable arg1, IPointable arg2) throws HyracksDataException {
if (typeTag2 == ATypeTag.STRING) {
return strBinaryComp.compare(arg1.getByteArray(), arg1.getStartOffset(), arg1.getLength() - 1,
- arg2.getByteArray(), arg2.getStartOffset(), arg2.getLength() - 1);
+ arg2.getByteArray(), arg2.getStartOffset(), arg2.getLength() - 1);
}
throw new IncompatibleTypeException(COMPARISON, ATypeTag.SERIALIZED_STRING_TYPE_TAG, typeTag2.serialize());
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/comparisons/DeepEqualAssessor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/comparisons/DeepEqualAssessor.java
index 1faf75c..21b19aa 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/comparisons/DeepEqualAssessor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/comparisons/DeepEqualAssessor.java
@@ -60,9 +60,9 @@
if (ATypeHierarchy.isSameTypeDomain(leftTypeTag, rightTypeTag, false)
&& ATypeHierarchy.getTypeDomain(leftTypeTag) == Domain.NUMERIC) {
double leftVal = ATypeHierarchy.getDoubleValue(DEEP_EQUAL, 0, leftPointable.getByteArray(),
- leftPointable.getStartOffset());
+ leftPointable.getStartOffset());
double rightVal = ATypeHierarchy.getDoubleValue(DEEP_EQUAL, 1, rightPointable.getByteArray(),
- rightPointable.getStartOffset());
+ rightPointable.getStartOffset());
return Math.abs(leftVal - rightVal) < EPSILON;
} else {
return false;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalConstructorDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalConstructorDescriptor.java
index 5b9a36b..9309591 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalConstructorDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalConstructorDescriptor.java
@@ -74,8 +74,8 @@
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInterval> intervalSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINTERVAL);
+ private ISerializerDeserializer<AInterval> intervalSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINTERVAL);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
index a8597c9..0e1942ec 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateConstructorDescriptor.java
@@ -84,8 +84,8 @@
private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
private AMutableDuration aDuration = new AMutableDuration(0, 0L);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInterval> intervalSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINTERVAL);
+ private ISerializerDeserializer<AInterval> intervalSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINTERVAL);
private final UTF8StringPointable utf8Ptr = new UTF8StringPointable();
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
index 6d56b1b..3e61405 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromDateTimeConstructorDescriptor.java
@@ -82,8 +82,8 @@
private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
private AMutableDuration aDuration = new AMutableDuration(0, 0L);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInterval> intervalSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINTERVAL);
+ private ISerializerDeserializer<AInterval> intervalSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINTERVAL);
private final UTF8StringPointable utf8Ptr = new UTF8StringPointable();
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
index d64e468..fa402bd 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/AIntervalStartFromTimeConstructorDescriptor.java
@@ -82,8 +82,8 @@
private AMutableInterval aInterval = new AMutableInterval(0L, 0L, (byte) 0);
private AMutableDuration aDuration = new AMutableDuration(0, 0L);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInterval> intervalSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINTERVAL);
+ private ISerializerDeserializer<AInterval> intervalSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINTERVAL);
private final UTF8StringPointable utf8Ptr = new UTF8StringPointable();
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java
index 56f6e38..c338a35 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/constructors/ATimeConstructorDescriptor.java
@@ -70,8 +70,8 @@
private IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);
private AMutableTime aTime = new AMutableTime(0);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ATime> timeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ATIME);
+ private ISerializerDeserializer<ATime> timeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
private final UTF8StringPointable utf8Ptr = new UTF8StringPointable();
@Override
@@ -97,8 +97,8 @@
ATypeTag.SERIALIZED_POLYGON_TYPE_TAG);
}
- int chrononTimeInMs = ATimeParserFactory.parseTimePart(serString, startOffset,
- stringLength);
+ int chrononTimeInMs =
+ ATimeParserFactory.parseTimePart(serString, startOffset, stringLength);
if (chrononTimeInMs < 0) {
chrononTimeInMs += GregorianCalendarSystem.CHRONON_OF_DAY;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractBinaryStringBoolEval.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractBinaryStringBoolEval.java
index dce2462..9c38fbc 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractBinaryStringBoolEval.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractBinaryStringBoolEval.java
@@ -35,8 +35,8 @@
// For outputting results.
@SuppressWarnings({ "rawtypes" })
- private ISerializerDeserializer boolSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ private ISerializerDeserializer boolSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
public AbstractBinaryStringBoolEval(IHyracksTaskContext context, IScalarEvaluatorFactory evalLeftFactory,
IScalarEvaluatorFactory evalRightFactory, FunctionIdentifier funcID) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractBinaryStringIntEval.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractBinaryStringIntEval.java
index 5aaee4c..ebe1273 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractBinaryStringIntEval.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractBinaryStringIntEval.java
@@ -36,8 +36,8 @@
// For outputting results.
@SuppressWarnings({ "rawtypes" })
- private ISerializerDeserializer intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ private ISerializerDeserializer intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
private AMutableInt32 resultValue = new AMutableInt32(0);
public AbstractBinaryStringIntEval(IHyracksTaskContext context, IScalarEvaluatorFactory evalLeftFactory,
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractNumericArithmeticEval.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractNumericArithmeticEval.java
index 45e9ee1..936415d 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractNumericArithmeticEval.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractNumericArithmeticEval.java
@@ -279,8 +279,7 @@
}
@SuppressWarnings("unchecked")
- private void evaluateTemporalArthmeticOperation(ATypeTag leftType)
- throws HyracksDataException {
+ private void evaluateTemporalArthmeticOperation(ATypeTag leftType) throws HyracksDataException {
byte[] bytes1 = argPtr1.getByteArray();
int offset1 = argPtr1.getStartOffset();
ATypeTag rightType = EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(bytes1[offset1]);
@@ -318,10 +317,10 @@
AYearMonthDurationSerializerDeserializer.getYearMonth(bytes1, offset1 + 1));
break;
case DAYTIMEDURATION:
- leftChronon = ADayTimeDurationSerializerDeserializer.getDayTime(bytes0,
- offset0 + 1);
- rightChronon = ADayTimeDurationSerializerDeserializer.getDayTime(bytes1,
- offset1 + 1);
+ leftChronon =
+ ADayTimeDurationSerializerDeserializer.getDayTime(bytes0, offset0 + 1);
+ rightChronon =
+ ADayTimeDurationSerializerDeserializer.getDayTime(bytes1, offset1 + 1);
break;
default:
throw new UnsupportedTypeException(getIdentifier(), bytes1[offset1]);
@@ -354,8 +353,8 @@
break;
case DURATION:
dayTime = ADurationSerializerDeserializer.getDayTime(bytes1, offset1 + 1);
- yearMonth = ADurationSerializerDeserializer.getYearMonth(bytes1,
- offset1 + 1);
+ yearMonth =
+ ADurationSerializerDeserializer.getYearMonth(bytes1, offset1 + 1);
break;
default:
throw new IncompatibleTypeException(getIdentifier(), bytes0[offset0],
@@ -377,8 +376,8 @@
}
switch (rightType) {
case DURATION:
- yearMonth = ADurationSerializerDeserializer.getYearMonth(bytes1,
- offset1 + 1);
+ yearMonth =
+ ADurationSerializerDeserializer.getYearMonth(bytes1, offset1 + 1);
dayTime = ADurationSerializerDeserializer.getDayTime(bytes1, offset1 + 1);
break;
case YEARMONTHDURATION:
@@ -395,8 +394,8 @@
}
break;
case YEARMONTHDURATION:
- yearMonth = AYearMonthDurationSerializerDeserializer.getYearMonth(bytes0,
- offset0 + 1);
+ yearMonth =
+ AYearMonthDurationSerializerDeserializer.getYearMonth(bytes0, offset0 + 1);
switch (rightType) {
case DATETIME:
serde = SerializerDeserializerProvider.INSTANCE
@@ -421,8 +420,8 @@
dayTime = ADurationSerializerDeserializer.getDayTime(bytes0, offset0 + 1);
case DAYTIMEDURATION:
if (leftType == ATypeTag.DAYTIMEDURATION) {
- dayTime = ADayTimeDurationSerializerDeserializer.getDayTime(bytes0,
- offset0 + 1);
+ dayTime =
+ ADayTimeDurationSerializerDeserializer.getDayTime(bytes0, offset0 + 1);
}
switch (rightType) {
case DATETIME:
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractQuadStringStringEval.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractQuadStringStringEval.java
index b607309..1b7c679 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractQuadStringStringEval.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractQuadStringStringEval.java
@@ -59,8 +59,8 @@
private AMutableString resultBuffer = new AMutableString("");
@SuppressWarnings("rawtypes")
- private ISerializerDeserializer strSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
+ private ISerializerDeserializer strSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
private final UTF8StringPointable strPtr1st = new UTF8StringPointable();
private final UTF8StringPointable strPtr2nd = new UTF8StringPointable();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringBoolEval.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringBoolEval.java
index c7d839e..830f2ff 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringBoolEval.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringBoolEval.java
@@ -34,8 +34,8 @@
public abstract class AbstractTripleStringBoolEval extends AbstractTripleStringEval {
@SuppressWarnings("rawtypes")
- private ISerializerDeserializer boolSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ private ISerializerDeserializer boolSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
public AbstractTripleStringBoolEval(IHyracksTaskContext context, IScalarEvaluatorFactory eval0,
IScalarEvaluatorFactory eval1, IScalarEvaluatorFactory eval2, FunctionIdentifier funcID)
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringIntEval.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringIntEval.java
index 2aa2f1a..7f0076b 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringIntEval.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringIntEval.java
@@ -35,8 +35,8 @@
public abstract class AbstractTripleStringIntEval extends AbstractTripleStringEval {
@SuppressWarnings("rawtypes")
- private final ISerializerDeserializer intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ private final ISerializerDeserializer intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
private final AMutableInt32 resultValue = new AMutableInt32(0);
public AbstractTripleStringIntEval(IHyracksTaskContext context, IScalarEvaluatorFactory eval0,
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringStringEval.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringStringEval.java
index b7bd056..48ef5f7 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringStringEval.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractTripleStringStringEval.java
@@ -34,8 +34,8 @@
public abstract class AbstractTripleStringStringEval extends AbstractTripleStringEval {
@SuppressWarnings("rawtypes")
- private final ISerializerDeserializer stringSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
+ private final ISerializerDeserializer stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
private final AMutableString resultValue = new AMutableString("");
public AbstractTripleStringStringEval(IHyracksTaskContext context, IScalarEvaluatorFactory eval0,
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractUnaryNumericFunctionEval.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractUnaryNumericFunctionEval.java
index c56d58f..06dcc17 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractUnaryNumericFunctionEval.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/AbstractUnaryNumericFunctionEval.java
@@ -72,23 +72,23 @@
private final IScalarEvaluator argEval;
@SuppressWarnings("rawtypes")
- protected ISerializerDeserializer int8Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT8);
+ protected ISerializerDeserializer int8Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT8);
@SuppressWarnings("rawtypes")
- protected ISerializerDeserializer int16Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT16);
+ protected ISerializerDeserializer int16Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT16);
@SuppressWarnings("rawtypes")
- protected ISerializerDeserializer int32Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ protected ISerializerDeserializer int32Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
@SuppressWarnings("rawtypes")
- protected ISerializerDeserializer int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ protected ISerializerDeserializer int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@SuppressWarnings("rawtypes")
- protected ISerializerDeserializer floatSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AFLOAT);
+ protected ISerializerDeserializer floatSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AFLOAT);
@SuppressWarnings("rawtypes")
- protected ISerializerDeserializer doubleSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
+ protected ISerializerDeserializer doubleSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);
// The function identifier, used for error messages.
private final FunctionIdentifier funcID;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/BinaryHashMap.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/BinaryHashMap.java
index 2864473..f0edf5f 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/BinaryHashMap.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/BinaryHashMap.java
@@ -57,9 +57,8 @@
private int nextOff;
private int size;
-
- public BinaryHashMap(int tableSize, int frameSize, IBinaryHashFunction putHashFunc,
- IBinaryHashFunction getHashFunc, IBinaryComparator cmp) {
+ public BinaryHashMap(int tableSize, int frameSize, IBinaryHashFunction putHashFunc, IBinaryHashFunction getHashFunc,
+ IBinaryComparator cmp) {
listHeads = new long[tableSize];
this.frameSize = frameSize;
this.putHashFunc = putHashFunc;
@@ -198,8 +197,8 @@
}
public class BinaryHashMapIterator implements Iterator<Pair<BinaryEntry, BinaryEntry>> {
- private final Pair<BinaryEntry, BinaryEntry> val = new Pair<BinaryEntry, BinaryEntry>(new BinaryEntry(),
- new BinaryEntry());
+ private final Pair<BinaryEntry, BinaryEntry> val =
+ new Pair<BinaryEntry, BinaryEntry>(new BinaryEntry(), new BinaryEntry());
private int listHeadIndex;
private ByteBuffer frame;
private int frameIndex;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CountHashedGramTokensDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CountHashedGramTokensDescriptor.java
index 8998122..dd36671 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CountHashedGramTokensDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CountHashedGramTokensDescriptor.java
@@ -58,8 +58,8 @@
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
ITokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
- NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(3, true, false, true,
- tokenFactory);
+ NGramUTF8StringBinaryTokenizer tokenizer =
+ new NGramUTF8StringBinaryTokenizer(3, true, false, true, tokenFactory);
return new GramTokensEvaluator(args, ctx, tokenizer, BuiltinType.AINT32);
}
};
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateCircleDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateCircleDescriptor.java
index 7dbba41..4866583 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateCircleDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateCircleDescriptor.java
@@ -75,8 +75,8 @@
private AMutablePoint aPoint = new AMutablePoint(0, 0);
private AMutableCircle aCircle = new AMutableCircle(null, 0);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ACircle> circleSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ACIRCLE);
+ private ISerializerDeserializer<ACircle> circleSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ACIRCLE);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateLineDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateLineDescriptor.java
index 8165a55..0c8c12f 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateLineDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateLineDescriptor.java
@@ -75,8 +75,8 @@
private AMutableLine aLine = new AMutableLine(null, null);
private AMutablePoint[] aPoint = { new AMutablePoint(0, 0), new AMutablePoint(0, 0) };
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ALine> lineSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ALINE);
+ private ISerializerDeserializer<ALine> lineSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ALINE);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
@@ -104,15 +104,15 @@
ADoubleSerializerDeserializer.getDouble(bytes0,
offset0 + APointSerializerDeserializer
.getCoordinateOffset(Coordinate.X)),
- ADoubleSerializerDeserializer.getDouble(bytes0,
- offset0 + APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y)));
+ ADoubleSerializerDeserializer.getDouble(bytes0, offset0
+ + APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y)));
aPoint[1]
.setValue(
ADoubleSerializerDeserializer.getDouble(bytes1,
offset1 + APointSerializerDeserializer
.getCoordinateOffset(Coordinate.X)),
- ADoubleSerializerDeserializer.getDouble(bytes1,
- offset1 + APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y)));
+ ADoubleSerializerDeserializer.getDouble(bytes1, offset1
+ + APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y)));
aLine.setValue(aPoint[0], aPoint[1]);
lineSerde.serialize(aLine, out);
} catch (IOException e1) {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePointDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePointDescriptor.java
index 9399f6b..e34091b 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePointDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreatePointDescriptor.java
@@ -69,8 +69,8 @@
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
private AMutablePoint aPoint = new AMutablePoint(0, 0);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<APoint> pointSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.APOINT);
+ private ISerializerDeserializer<APoint> pointSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.APOINT);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateRectangleDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateRectangleDescriptor.java
index b51042f..59cc2c1 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateRectangleDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateRectangleDescriptor.java
@@ -104,15 +104,15 @@
ADoubleSerializerDeserializer.getDouble(bytes0,
offset0 + APointSerializerDeserializer
.getCoordinateOffset(Coordinate.X)),
- ADoubleSerializerDeserializer.getDouble(bytes0,
- offset0 + APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y)));
+ ADoubleSerializerDeserializer.getDouble(bytes0, offset0
+ + APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y)));
aPoint[1]
.setValue(
ADoubleSerializerDeserializer.getDouble(bytes1,
offset1 + APointSerializerDeserializer
.getCoordinateOffset(Coordinate.X)),
- ADoubleSerializerDeserializer.getDouble(bytes1,
- offset1 + APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y)));
+ ADoubleSerializerDeserializer.getDouble(bytes1, offset1
+ + APointSerializerDeserializer.getCoordinateOffset(Coordinate.Y)));
if (aPoint[0].getX() > aPoint[1].getX() && aPoint[0].getY() > aPoint[1].getY()) {
aRectangle.setValue(aPoint[1], aPoint[0]);
} else if (aPoint[0].getX() < aPoint[1].getX() && aPoint[0].getY() < aPoint[1].getY()) {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateUUIDDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateUUIDDescriptor.java
index 829f684..8f43bb9 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateUUIDDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/CreateUUIDDescriptor.java
@@ -56,8 +56,8 @@
private static final long serialVersionUID = 1L;
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AUUID> uuidSerDe = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AUUID);
+ private final ISerializerDeserializer<AUUID> uuidSerDe =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AUUID);
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/DeepEqualityDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/DeepEqualityDescriptor.java
index 51497ed..79c63e7 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/DeepEqualityDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/DeepEqualityDescriptor.java
@@ -76,8 +76,8 @@
return new IScalarEvaluatorFactory() {
private static final long serialVersionUID = 1L;
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ABoolean> boolSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ private final ISerializerDeserializer<ABoolean> boolSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/EditDistanceListIsFilterableDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/EditDistanceListIsFilterableDescriptor.java
index 9c4fb41..0f4ebee 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/EditDistanceListIsFilterableDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/EditDistanceListIsFilterableDescriptor.java
@@ -92,8 +92,8 @@
protected final IScalarEvaluator edThreshEval;
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ABoolean> booleanSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ private final ISerializerDeserializer<ABoolean> booleanSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
public EditDistanceListIsFilterableEvaluator(IScalarEvaluatorFactory[] args, IHyracksTaskContext context)
throws HyracksDataException {
@@ -122,16 +122,15 @@
listLen = AOrderedListSerializerDeserializer.getNumberOfItems(bytes, offset);
break;
default:
- throw new TypeMismatchException(BuiltinFunctions.EDIT_DISTANCE_LIST_IS_FILTERABLE,
- 0, ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG,
- ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG);
+ throw new TypeMismatchException(BuiltinFunctions.EDIT_DISTANCE_LIST_IS_FILTERABLE, 0,
+ ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG, ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG);
}
// Check type and extract edit-distance threshold.
bytes = edThreshPtr.getByteArray();
offset = edThreshPtr.getStartOffset();
- long edThresh = ATypeHierarchy.getIntegerValue(
- BuiltinFunctions.EDIT_DISTANCE_LIST_IS_FILTERABLE.getName(), 1, bytes, offset);
+ long edThresh = ATypeHierarchy.getIntegerValue(BuiltinFunctions.EDIT_DISTANCE_LIST_IS_FILTERABLE.getName(),
+ 1, bytes, offset);
// Compute result.
long lowerBound = listLen - edThresh;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/FullTextContainsDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/FullTextContainsDescriptor.java
index 48541cb..810f6c4 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/FullTextContainsDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/FullTextContainsDescriptor.java
@@ -47,10 +47,10 @@
public static final String CONJUNCTIVE_SEARCH_MODE_OPTION = "all";
private static final byte[] SEARCH_MODE_OPTION_ARRAY = UTF8StringUtil.writeStringToBytes(SEARCH_MODE_OPTION);
- private static final byte[] DISJUNCTIVE_SEARCH_MODE_OPTION_ARRAY = UTF8StringUtil
- .writeStringToBytes(DISJUNCTIVE_SEARCH_MODE_OPTION);
- private static final byte[] CONJUNCTIVE_SEARCH_MODE_OPTION_ARRAY = UTF8StringUtil
- .writeStringToBytes(CONJUNCTIVE_SEARCH_MODE_OPTION);
+ private static final byte[] DISJUNCTIVE_SEARCH_MODE_OPTION_ARRAY =
+ UTF8StringUtil.writeStringToBytes(DISJUNCTIVE_SEARCH_MODE_OPTION);
+ private static final byte[] CONJUNCTIVE_SEARCH_MODE_OPTION_ARRAY =
+ UTF8StringUtil.writeStringToBytes(CONJUNCTIVE_SEARCH_MODE_OPTION);
static {
paramTypeMap.put(SEARCH_MODE_OPTION, ATypeTag.STRING);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/FullTextContainsWithoutOptionDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/FullTextContainsWithoutOptionDescriptor.java
index 7cfaa62..6ab87ac 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/FullTextContainsWithoutOptionDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/FullTextContainsWithoutOptionDescriptor.java
@@ -64,5 +64,4 @@
return BuiltinFunctions.FULLTEXT_CONTAINS_WO_OPTION;
}
-
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/GramTokensDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/GramTokensDescriptor.java
index ec545ff..190013b 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/GramTokensDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/GramTokensDescriptor.java
@@ -56,8 +56,8 @@
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
ITokenFactory tokenFactory = new UTF8NGramTokenFactory();
- NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(3, true, true, true,
- tokenFactory);
+ NGramUTF8StringBinaryTokenizer tokenizer =
+ new NGramUTF8StringBinaryTokenizer(3, true, true, true, tokenFactory);
return new GramTokensEvaluator(args, ctx, tokenizer, BuiltinType.ASTRING);
}
};
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/HashedGramTokensDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/HashedGramTokensDescriptor.java
index 49d9f27..32dc292 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/HashedGramTokensDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/HashedGramTokensDescriptor.java
@@ -56,8 +56,8 @@
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
ITokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
- NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(3, true, true, true,
- tokenFactory);
+ NGramUTF8StringBinaryTokenizer tokenizer =
+ new NGramUTF8StringBinaryTokenizer(3, true, true, true, tokenFactory);
return new GramTokensEvaluator(args, ctx, tokenizer, BuiltinType.AINT32);
}
};
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/LenDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/LenDescriptor.java
index cc0ae77..4268fc7 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/LenDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/LenDescriptor.java
@@ -71,8 +71,8 @@
// result
private final AMutableInt64 res = new AMutableInt64(0);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private final ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
@@ -83,8 +83,8 @@
if (serList[offset] != ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG
&& serList[offset] != ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG) {
- throw new TypeMismatchException(getIdentifier(), 0,
- serList[offset], ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG,
+ throw new TypeMismatchException(getIdentifier(), 0, serList[offset],
+ ATypeTag.SERIALIZED_ORDEREDLIST_TYPE_TAG,
ATypeTag.SERIALIZED_UNORDEREDLIST_TYPE_TAG);
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericATan2Descriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericATan2Descriptor.java
index 6b15167..40cb659 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericATan2Descriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericATan2Descriptor.java
@@ -72,8 +72,8 @@
// For the output.
private final AMutableDouble aDouble = new AMutableDouble(0.0);
@SuppressWarnings("rawtypes")
- private final ISerializerDeserializer outputSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADOUBLE);
+ private final ISerializerDeserializer outputSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);
private final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
private final DataOutput out = resultStorage.getDataOutput();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericCaretDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericCaretDescriptor.java
index 642bf71..3b72072 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericCaretDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericCaretDescriptor.java
@@ -43,7 +43,7 @@
*/
@Override
protected long evaluateInteger(long lhs, long rhs) throws HyracksDataException {
- if(rhs > Integer.MAX_VALUE){
+ if (rhs > Integer.MAX_VALUE) {
throw new ArithmeticException("Exponent cannot be larger than 2^31-1");
}
return LongMath.checkedPow(lhs, (int) rhs);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericDivideDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericDivideDescriptor.java
index b6f9117..7cda149 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericDivideDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericDivideDescriptor.java
@@ -46,7 +46,7 @@
if (rhs == 0) {
throw new ArithmeticException("Division by Zero.");
}
- if ( (lhs == Long.MIN_VALUE) && (rhs == -1L) ) {
+ if ((lhs == Long.MIN_VALUE) && (rhs == -1L)) {
throw new ArithmeticException(("Overflow in integer division"));
}
return lhs / rhs;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericTruncDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericTruncDescriptor.java
index e2cf5e7..fb52641 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericTruncDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericTruncDescriptor.java
@@ -118,9 +118,9 @@
} else if (bytes[offset] == ATypeTag.SERIALIZED_INT64_TYPE_TAG) {
return (int) AInt64SerializerDeserializer.getLong(bytes, offset + 1);
} else {
- throw new TypeMismatchException(getIdentifier(), 1, bytes[offset],
- ATypeTag.SERIALIZED_INT8_TYPE_TAG, ATypeTag.SERIALIZED_INT16_TYPE_TAG,
- ATypeTag.SERIALIZED_INT32_TYPE_TAG, ATypeTag.SERIALIZED_INT64_TYPE_TAG);
+ throw new TypeMismatchException(getIdentifier(), 1, bytes[offset], ATypeTag.SERIALIZED_INT8_TYPE_TAG,
+ ATypeTag.SERIALIZED_INT16_TYPE_TAG, ATypeTag.SERIALIZED_INT32_TYPE_TAG,
+ ATypeTag.SERIALIZED_INT64_TYPE_TAG);
}
}
@@ -178,10 +178,10 @@
serde.serialize(aDouble, out);
}
} else {
- throw new TypeMismatchException(getIdentifier(), 0, data[offset],
- ATypeTag.SERIALIZED_INT8_TYPE_TAG, ATypeTag.SERIALIZED_INT16_TYPE_TAG,
- ATypeTag.SERIALIZED_INT32_TYPE_TAG, ATypeTag.SERIALIZED_INT64_TYPE_TAG,
- ATypeTag.SERIALIZED_FLOAT_TYPE_TAG, ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG);
+ throw new TypeMismatchException(getIdentifier(), 0, data[offset], ATypeTag.SERIALIZED_INT8_TYPE_TAG,
+ ATypeTag.SERIALIZED_INT16_TYPE_TAG, ATypeTag.SERIALIZED_INT32_TYPE_TAG,
+ ATypeTag.SERIALIZED_INT64_TYPE_TAG, ATypeTag.SERIALIZED_FLOAT_TYPE_TAG,
+ ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG);
}
result.set(resultStorage);
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/PrefixLenDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/PrefixLenDescriptor.java
index 362d808..c9a865b 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/PrefixLenDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/PrefixLenDescriptor.java
@@ -49,8 +49,8 @@
public class PrefixLenDescriptor extends AbstractScalarFunctionDynamicDescriptor {
private static final long serialVersionUID = 1L;
- private final static FunctionIdentifier FID = new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "prefix-len@3",
- 3);
+ private final static FunctionIdentifier FID =
+ new FunctionIdentifier(FunctionConstants.ASTERIX_NS, "prefix-len@3", 3);
public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
@Override
public IFunctionDescriptor createFunctionDescriptor() {
@@ -80,8 +80,8 @@
// result
private final AMutableInt32 res = new AMutableInt32(0);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AInt32> int32Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ private final ISerializerDeserializer<AInt32> int32Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
@@ -115,8 +115,8 @@
throw new TypeMismatchException(getIdentifier(), 2, data[offset],
ATypeTag.SERIALIZED_STRING_TYPE_TAG);
}
- SimilarityFilters similarityFilters = similarityFiltersCache.get(similarityThreshold, data,
- offset, len);
+ SimilarityFilters similarityFilters =
+ similarityFiltersCache.get(similarityThreshold, data, offset, len);
int prefixLength = similarityFilters.getPrefixLength(length);
res.setValue(prefixLength);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/PrefixLenJaccardDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/PrefixLenJaccardDescriptor.java
index 5a67121..25d4be3 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/PrefixLenJaccardDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/PrefixLenJaccardDescriptor.java
@@ -78,8 +78,8 @@
// result
private final AMutableInt32 res = new AMutableInt32(0);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AInt32> int32Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT32);
+ private final ISerializerDeserializer<AInt32> int32Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SimilarityJaccardPrefixCheckDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SimilarityJaccardPrefixCheckDescriptor.java
index 2d88402..e1b5f3f 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SimilarityJaccardPrefixCheckDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SimilarityJaccardPrefixCheckDescriptor.java
@@ -73,11 +73,11 @@
private final OrderedListBuilder listBuilder;
private ArrayBackedValueStorage inputVal;
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ABoolean> booleanSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ private final ISerializerDeserializer<ABoolean> booleanSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AFloat> floatSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AFLOAT);
+ private final ISerializerDeserializer<AFloat> floatSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AFLOAT);
private final AMutableFloat aFloat = new AMutableFloat(0);
private final AOrderedListType listType = new AOrderedListType(BuiltinType.ANY, "list");
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SpatialIntersectDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SpatialIntersectDescriptor.java
index 6edbf6f..4229f87 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SpatialIntersectDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SpatialIntersectDescriptor.java
@@ -92,14 +92,14 @@
private boolean pointOnLine(double pX, double pY, double startX, double startY, double endX,
double endY) throws HyracksDataException {
- double crossProduct = SpatialUtils.crossProduct(pY - startY, pX - startX, endY - startY,
- endX - startX);
+ double crossProduct =
+ SpatialUtils.crossProduct(pY - startY, pX - startX, endY - startY, endX - startX);
if (Math.abs(crossProduct) > SpatialUtils.doubleEpsilon()) { // crossProduct != 0
return false;
}
- double dotProduct = SpatialUtils.dotProduct((pX - startX), (pY - startY), (endX - startX),
- (endY - startY));
+ double dotProduct =
+ SpatialUtils.dotProduct((pX - startX), (pY - startY), (endX - startX), (endY - startY));
if (dotProduct < 0.0) {
return false;
}
@@ -536,8 +536,8 @@
min1 = spatialUtils.getMinProjection();
max1 = spatialUtils.getMaxProjection();
- dotProduct = SpatialUtils.dotProduct(spatialUtils.getXAxis(), spatialUtils.getYAxis(), cX,
- cY);
+ dotProduct =
+ SpatialUtils.dotProduct(spatialUtils.getXAxis(), spatialUtils.getYAxis(), cX, cY);
max2 = dotProduct + radius;
min2 = dotProduct - radius;
@@ -687,9 +687,9 @@
trianglesX1.reset();
trianglesY1.reset();
while (true) {
- middleVertex1 = triangulatePolygon(bytes1, offset1, numOfPoints1, pointsOffsets1,
- trianglesX1, trianglesY1, numOfTriangles1, nonSimplePolygonDetection1,
- middleVertex1);
+ middleVertex1 =
+ triangulatePolygon(bytes1, offset1, numOfPoints1, pointsOffsets1, trianglesX1,
+ trianglesY1, numOfTriangles1, nonSimplePolygonDetection1, middleVertex1);
if (middleVertex1 == -1) {
break;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringContainsDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringContainsDescriptor.java
index f55ef3d..46865cf 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringContainsDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringContainsDescriptor.java
@@ -49,8 +49,7 @@
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
- return new AbstractBinaryStringBoolEval(ctx, args[0], args[1],
- BuiltinFunctions.STRING_CONTAINS) {
+ return new AbstractBinaryStringBoolEval(ctx, args[0], args[1], BuiltinFunctions.STRING_CONTAINS) {
@Override
protected boolean compute(UTF8StringPointable left, UTF8StringPointable right) throws IOException {
return UTF8StringPointable.contains(left, right, false);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringEndsWithDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringEndsWithDescriptor.java
index ab0cfd3..6422158 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringEndsWithDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringEndsWithDescriptor.java
@@ -49,8 +49,7 @@
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
- return new AbstractBinaryStringBoolEval(ctx, args[0], args[1],
- BuiltinFunctions.STRING_ENDS_WITH) {
+ return new AbstractBinaryStringBoolEval(ctx, args[0], args[1], BuiltinFunctions.STRING_ENDS_WITH) {
@Override
protected boolean compute(UTF8StringPointable left, UTF8StringPointable right) throws IOException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringEqualDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringEqualDescriptor.java
index 6a7ad51..71a5742 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringEqualDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringEqualDescriptor.java
@@ -49,8 +49,7 @@
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
- return new AbstractBinaryStringBoolEval(ctx, args[0], args[1],
- BuiltinFunctions.STRING_EQUAL) {
+ return new AbstractBinaryStringBoolEval(ctx, args[0], args[1], BuiltinFunctions.STRING_EQUAL) {
@Override
protected boolean compute(UTF8StringPointable left, UTF8StringPointable right) throws IOException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringLengthDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringLengthDescriptor.java
index 58d63d1..bf62316 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringLengthDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringLengthDescriptor.java
@@ -66,8 +66,8 @@
private IPointable inputArg = new VoidPointable();
private IScalarEvaluator eval = args[0].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable resultPointable)
@@ -83,8 +83,8 @@
result.setValue(len);
int64Serde.serialize(result, out);
} else {
- throw new TypeMismatchException(getIdentifier(), 0,
- serString[offset], ATypeTag.SERIALIZED_STRING_TYPE_TAG);
+ throw new TypeMismatchException(getIdentifier(), 0, serString[offset],
+ ATypeTag.SERIALIZED_STRING_TYPE_TAG);
}
resultPointable.set(resultStorage);
} catch (IOException e1) {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringRTrim2Descriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringRTrim2Descriptor.java
index 1c6b1f7..5d32bca 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringRTrim2Descriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringRTrim2Descriptor.java
@@ -70,5 +70,4 @@
};
}
-
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringRepeatDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringRepeatDescriptor.java
index fe864f4..77c4f20 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringRepeatDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringRepeatDescriptor.java
@@ -83,8 +83,8 @@
// Gets the repeating times.
byte[] bytes = argNumber.getByteArray();
int offset = argNumber.getStartOffset();
- int repeatingTimes = ATypeHierarchy.getIntegerValue(getIdentifier().getName(), 1, bytes,
- offset);
+ int repeatingTimes =
+ ATypeHierarchy.getIntegerValue(getIdentifier().getName(), 1, bytes, offset);
// Checks repeatingTimes. It should be a non-negative value.
if (repeatingTimes < 0) {
throw new RuntimeDataException(ErrorCode.NEGATIVE_VALUE, getIdentifier(), 1,
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringSplitDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringSplitDescriptor.java
index 7027343..f788366 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringSplitDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringSplitDescriptor.java
@@ -117,8 +117,8 @@
// Gets the string length of the source string.
int inputStringLen = UTF8StringUtil.getUTFLength(srcString, srcOffset + 1);
- int inputStringStart = srcOffset + 1
- + UTF8StringUtil.getNumBytesToStoreLength(inputStringLen);
+ int inputStringStart =
+ srcOffset + 1 + UTF8StringUtil.getNumBytesToStoreLength(inputStringLen);
// Gets the string length of the pattern string.
int inputPatternLen = UTF8StringUtil.getUTFLength(patternString, patternOffset + 1);
// Handles the case that the pattern is "".
@@ -128,8 +128,8 @@
listBuilder.reset(intListType);
int itemStrStart = 0;
int nextMatchStart;
- while (itemStrStart < inputStringLen && (nextMatchStart = UTF8StringPointable
- .find(argStrPtr, argPatternPtr, false, itemStrStart)) >= 0) {
+ while (itemStrStart < inputStringLen && (nextMatchStart =
+ UTF8StringPointable.find(argStrPtr, argPatternPtr, false, itemStrStart)) >= 0) {
// Adds an item string.
addItemString(srcString, inputStringStart, itemStrStart,
emptyStringPattern ? nextMatchStart + 1 : nextMatchStart);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringStartsWithDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringStartsWithDescriptor.java
index 75ffeca..aa43e66 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringStartsWithDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringStartsWithDescriptor.java
@@ -49,8 +49,7 @@
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
- return new AbstractBinaryStringBoolEval(ctx, args[0], args[1],
- BuiltinFunctions.STRING_STARTS_WITH) {
+ return new AbstractBinaryStringBoolEval(ctx, args[0], args[1], BuiltinFunctions.STRING_STARTS_WITH) {
@Override
protected boolean compute(UTF8StringPointable left, UTF8StringPointable right) throws IOException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringToCodePointDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringToCodePointDescriptor.java
index fa06e4d..fe5ad9f 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringToCodePointDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringToCodePointDescriptor.java
@@ -74,8 +74,8 @@
private ArrayBackedValueStorage inputVal = new ArrayBackedValueStorage();
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private final ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private final AMutableInt64 aInt64 = new AMutableInt64(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringTrimDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringTrimDescriptor.java
index a459f42..bc4d150 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringTrimDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/StringTrimDescriptor.java
@@ -68,5 +68,4 @@
};
}
-
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringDescriptor.java
index caaa11a..28bbe08 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/SubstringDescriptor.java
@@ -103,8 +103,7 @@
try {
UTF8StringPointable.substr(string, start, len, builder, array);
} catch (StringIndexOutOfBoundsException e) {
- throw new RuntimeDataException(ErrorCode.OUT_OF_BOUND, getIdentifier(), 1,
- start + len - 1);
+ throw new RuntimeDataException(ErrorCode.OUT_OF_BOUND, getIdentifier(), 1, start + len - 1);
} catch (IOException e) {
throw new HyracksDataException(e);
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/ToBooleanDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/ToBooleanDescriptor.java
index ae6aca5..704630e 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/ToBooleanDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/ToBooleanDescriptor.java
@@ -95,8 +95,7 @@
setInteger(UTF8StringUtil.getStringLength(bytes, offset + 1), result);
break;
case ARRAY:
- setInteger(AOrderedListSerializerDeserializer.getNumberOfItems(bytes, offset),
- result);
+ setInteger(AOrderedListSerializerDeserializer.getNumberOfItems(bytes, offset), result);
break;
case MULTISET:
setInteger(AUnorderedListSerializerDeserializer.getNumberOfItems(bytes, offset),
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/ToDoubleDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/ToDoubleDescriptor.java
index a7e4d94..71cdbe5 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/ToDoubleDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/ToDoubleDescriptor.java
@@ -60,7 +60,7 @@
return new AbstractDoubleConstructorEvaluator(args[0].createScalarEvaluator(ctx)) {
@SuppressWarnings("unchecked")
private final ISerializerDeserializer<ANull> nullSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);
@Override
protected void evaluateImpl(IPointable result) throws IOException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/UUIDDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/UUIDDescriptor.java
index e4980f1..a254738 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/UUIDDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/UUIDDescriptor.java
@@ -57,8 +57,8 @@
private static final long serialVersionUID = 1L;
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<AUUID> uuidSerDe = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AUUID);
+ private final ISerializerDeserializer<AUUID> uuidSerDe =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AUUID);
@Override
public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/AbstractFindBinaryEvaluator.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/AbstractFindBinaryEvaluator.java
index a1f097d..827b43e 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/AbstractFindBinaryEvaluator.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/AbstractFindBinaryEvaluator.java
@@ -41,8 +41,8 @@
protected final ByteArrayPointable wordPtr = new ByteArrayPointable();
@SuppressWarnings("unchecked")
- protected ISerializerDeserializer<AInt64> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ protected ISerializerDeserializer<AInt64> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
public AbstractFindBinaryEvaluator(IHyracksTaskContext context, IScalarEvaluatorFactory[] copyEvaluatorFactories,
String functionName) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/AbstractSubBinaryEvaluator.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/AbstractSubBinaryEvaluator.java
index d777bc7..cd7b7d3 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/AbstractSubBinaryEvaluator.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/AbstractSubBinaryEvaluator.java
@@ -54,10 +54,10 @@
}
try {
- ATypeTag argTag0 = ATypeTag.VALUE_TYPE_MAPPING[pointables[0].getByteArray()[pointables[0]
- .getStartOffset()]];
- ATypeTag argTag1 = ATypeTag.VALUE_TYPE_MAPPING[pointables[1].getByteArray()[pointables[1]
- .getStartOffset()]];
+ ATypeTag argTag0 =
+ ATypeTag.VALUE_TYPE_MAPPING[pointables[0].getByteArray()[pointables[0].getStartOffset()]];
+ ATypeTag argTag1 =
+ ATypeTag.VALUE_TYPE_MAPPING[pointables[1].getByteArray()[pointables[1].getStartOffset()]];
checkTypeMachingThrowsIfNot(functionName, EXPECTED_INPUT_TAGS, argTag0, argTag1);
byteArrayPointable.set(pointables[0].getByteArray(), pointables[0].getStartOffset() + 1,
@@ -68,8 +68,8 @@
int subStart;
// strange SQL index convention
- subStart = ATypeHierarchy.getIntegerValue(BuiltinFunctions.SUBBINARY_FROM.getName(), 1, startBytes,
- offset) - 1;
+ subStart = ATypeHierarchy.getIntegerValue(BuiltinFunctions.SUBBINARY_FROM.getName(), 1, startBytes, offset)
+ - 1;
int totalLength = byteArrayPointable.getContentLength();
int subLength = getSubLength(tuple);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/BinaryLengthDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/BinaryLengthDescriptor.java
index 41fb805..2c53225 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/BinaryLengthDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/BinaryLengthDescriptor.java
@@ -60,8 +60,8 @@
private AMutableInt64 result = new AMutableInt64(0);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> intSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> intSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable resultPointable)
@@ -72,7 +72,7 @@
.getStartOffset()]];
checkTypeMachingThrowsIfNot(getIdentifier().getName(), EXPECTED_TAGS, tag);
int len = ByteArrayPointable.getContentLength(pointables[0].getByteArray(),
- pointables[0].getStartOffset() + 1);
+ pointables[0].getStartOffset() + 1);
result.setValue(len);
intSerde.serialize(result, dataOutput);
resultPointable.set(resultStorage);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/FindBinaryFromDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/FindBinaryFromDescriptor.java
index 299eaec..71f8459 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/FindBinaryFromDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/FindBinaryFromDescriptor.java
@@ -57,8 +57,7 @@
@Override
protected int getFromOffset(IFrameTupleReference tuple) throws HyracksDataException {
return ATypeHierarchy.getIntegerValue(getIdentifier().getName(), 2,
- pointables[2].getByteArray(),
- pointables[2].getStartOffset());
+ pointables[2].getByteArray(), pointables[2].getStartOffset());
}
};
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/ParseBinaryDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/ParseBinaryDescriptor.java
index 36a0b1e..67d7b0f 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/ParseBinaryDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/binary/ParseBinaryDescriptor.java
@@ -70,8 +70,8 @@
return new AbstractBinaryScalarEvaluator(ctx, args) {
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ABinary> binarySerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABINARY);
+ private ISerializerDeserializer<ABinary> binarySerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABINARY);
private AMutableBinary aBinary = new AMutableBinary(new byte[0], 0, 0);
private final UTF8StringPointable stringPointable = new UTF8StringPointable();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/GetRecordFieldsEvalFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/GetRecordFieldsEvalFactory.java
index 2019b55..9a3f3b9 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/GetRecordFieldsEvalFactory.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/GetRecordFieldsEvalFactory.java
@@ -52,8 +52,8 @@
public IScalarEvaluator createScalarEvaluator(final IHyracksTaskContext ctx) throws HyracksDataException {
return new IScalarEvaluator() {
- private final ARecordPointable recordPointable = (ARecordPointable) ARecordPointable.FACTORY
- .createPointable();
+ private final ARecordPointable recordPointable =
+ (ARecordPointable) ARecordPointable.FACTORY.createPointable();
private IPointable inputArg0 = new VoidPointable();
private IScalarEvaluator eval0 = recordEvalFactory.createScalarEvaluator(ctx);
private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordFieldsUtil.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordFieldsUtil.java
index 6c0eb06..d93d572 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordFieldsUtil.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordFieldsUtil.java
@@ -59,25 +59,25 @@
private final static AString nestedName = new AString("nested");
private final static AString listName = new AString("list");
- private IObjectPool<IARecordBuilder, ATypeTag> recordBuilderPool = new ListObjectPool<IARecordBuilder, ATypeTag>(
- new RecordBuilderFactory());
- private IObjectPool<IAsterixListBuilder, ATypeTag> listBuilderPool = new ListObjectPool<IAsterixListBuilder, ATypeTag>(
- new ListBuilderFactory());
- private IObjectPool<IMutableValueStorage, ATypeTag> abvsBuilderPool = new ListObjectPool<IMutableValueStorage, ATypeTag>(
- new AbvsBuilderFactory());
- private IObjectPool<IPointable, ATypeTag> recordPointablePool = new ListObjectPool<IPointable, ATypeTag>(
- ARecordPointable.ALLOCATOR);
- private IObjectPool<IPointable, ATypeTag> listPointablePool = new ListObjectPool<IPointable, ATypeTag>(
- AListPointable.ALLOCATOR);
+ private IObjectPool<IARecordBuilder, ATypeTag> recordBuilderPool =
+ new ListObjectPool<IARecordBuilder, ATypeTag>(new RecordBuilderFactory());
+ private IObjectPool<IAsterixListBuilder, ATypeTag> listBuilderPool =
+ new ListObjectPool<IAsterixListBuilder, ATypeTag>(new ListBuilderFactory());
+ private IObjectPool<IMutableValueStorage, ATypeTag> abvsBuilderPool =
+ new ListObjectPool<IMutableValueStorage, ATypeTag>(new AbvsBuilderFactory());
+ private IObjectPool<IPointable, ATypeTag> recordPointablePool =
+ new ListObjectPool<IPointable, ATypeTag>(ARecordPointable.ALLOCATOR);
+ private IObjectPool<IPointable, ATypeTag> listPointablePool =
+ new ListObjectPool<IPointable, ATypeTag>(AListPointable.ALLOCATOR);
private final static AOrderedListType listType = new AOrderedListType(BuiltinType.ANY, "fields");
//Better not be a static object.
@SuppressWarnings("unchecked")
- protected final ISerializerDeserializer<AString> stringSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ASTRING);
+ protected final ISerializerDeserializer<AString> stringSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ASTRING);
@SuppressWarnings("unchecked")
- protected final ISerializerDeserializer<ABoolean> booleanSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ protected final ISerializerDeserializer<ABoolean> booleanSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
private final static ARecordType openType = DefaultOpenFieldType.NESTED_OPEN_RECORD_TYPE;
@@ -175,8 +175,7 @@
orderedListBuilder.write(out, true);
}
- public void addNameField(IValueReference nameArg, IARecordBuilder fieldRecordBuilder)
- throws HyracksDataException {
+ public void addNameField(IValueReference nameArg, IARecordBuilder fieldRecordBuilder) throws HyracksDataException {
ArrayBackedValueStorage fieldAbvs = getTempBuffer();
fieldAbvs.reset();
@@ -184,8 +183,7 @@
fieldRecordBuilder.addField(fieldAbvs, nameArg);
}
- public void addFieldType(byte tagId, IARecordBuilder fieldRecordBuilder)
- throws HyracksDataException {
+ public void addFieldType(byte tagId, IARecordBuilder fieldRecordBuilder) throws HyracksDataException {
ArrayBackedValueStorage fieldAbvs = getTempBuffer();
ArrayBackedValueStorage valueAbvs = getTempBuffer();
@@ -201,8 +199,7 @@
fieldRecordBuilder.addField(fieldAbvs, valueAbvs);
}
- public void addIsOpenField(boolean isOpen, IARecordBuilder fieldRecordBuilder)
- throws HyracksDataException {
+ public void addIsOpenField(boolean isOpen, IARecordBuilder fieldRecordBuilder) throws HyracksDataException {
ArrayBackedValueStorage fieldAbvs = getTempBuffer();
ArrayBackedValueStorage valueAbvs = getTempBuffer();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordMergeDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordMergeDescriptor.java
index 63b58ed..9ff670f 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordMergeDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordMergeDescriptor.java
@@ -131,7 +131,7 @@
try {
mergeFields(outRecType, rp0, rp1, true, 0);
rbStack.get(0).write(out, true);
- } catch (IOException e) {
+ } catch (IOException e) {
throw new HyracksDataException(e);
}
result.set(resultStorage);
@@ -168,8 +168,7 @@
openFromParent, nestedLevel);
foundMatch = true;
} else {
- throw new RuntimeDataException(ErrorCode.DUPLICATE_FIELD_NAME,
- getIdentifier());
+ throw new RuntimeDataException(ErrorCode.DUPLICATE_FIELD_NAME, getIdentifier());
}
}
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordPairsDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordPairsDescriptor.java
index e5ed628..2bf2530 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordPairsDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordPairsDescriptor.java
@@ -108,8 +108,8 @@
return new IScalarEvaluator() {
private final IScalarEvaluator argEvaluator = args[0].createScalarEvaluator(ctx);
private final IPointable argPtr = new VoidPointable();
- private final ARecordVisitablePointable recordVisitablePointable = new ARecordVisitablePointable(
- recType);
+ private final ARecordVisitablePointable recordVisitablePointable =
+ new ARecordVisitablePointable(recType);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordRemoveFieldsEvalFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordRemoveFieldsEvalFactory.java
index 74f7259..192a14b 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordRemoveFieldsEvalFactory.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/records/RecordRemoveFieldsEvalFactory.java
@@ -125,8 +125,7 @@
}
private void processRecord(ARecordType requiredType, ARecordVisitablePointable srp,
- AListVisitablePointable inputList, int nestedLevel)
- throws IOException {
+ AListVisitablePointable inputList, int nestedLevel) throws IOException {
if (rbStack.size() < (nestedLevel + 1)) {
rbStack.add(new RecordBuilder());
}
@@ -156,8 +155,7 @@
private void addKeptFieldToSubRecord(ARecordType requiredType, IVisitablePointable fieldNamePointable,
IVisitablePointable fieldValuePointable, IVisitablePointable fieldTypePointable,
- AListVisitablePointable inputList, int nestedLevel)
- throws IOException {
+ AListVisitablePointable inputList, int nestedLevel) throws IOException {
runtimeRecordTypeInfo.reset(requiredType);
int pos = runtimeRecordTypeInfo.getFieldIndex(fieldNamePointable.getByteArray(),
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java
index fb7b235..0a1bcf5 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/AbstractIntervalLogicFuncDescriptor.java
@@ -54,21 +54,21 @@
protected final IntervalLogic il = new IntervalLogic();
private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
private DataOutput out = resultStorage.getDataOutput();
- private TaggedValuePointable argPtr0 = (TaggedValuePointable) TaggedValuePointable.FACTORY
- .createPointable();
- private TaggedValuePointable argPtr1 = (TaggedValuePointable) TaggedValuePointable.FACTORY
- .createPointable();
- private AIntervalPointable interval0 = (AIntervalPointable) AIntervalPointable.FACTORY
- .createPointable();
- private AIntervalPointable interval1 = (AIntervalPointable) AIntervalPointable.FACTORY
- .createPointable();
+ private TaggedValuePointable argPtr0 =
+ (TaggedValuePointable) TaggedValuePointable.FACTORY.createPointable();
+ private TaggedValuePointable argPtr1 =
+ (TaggedValuePointable) TaggedValuePointable.FACTORY.createPointable();
+ private AIntervalPointable interval0 =
+ (AIntervalPointable) AIntervalPointable.FACTORY.createPointable();
+ private AIntervalPointable interval1 =
+ (AIntervalPointable) AIntervalPointable.FACTORY.createPointable();
private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
// possible output types
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ABoolean> booleanSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ private ISerializerDeserializer<ABoolean> booleanSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java
index a06b440..3dab641 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDuartionFromDateDescriptor.java
@@ -75,8 +75,8 @@
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADuration> durationSerde = SerializerDeserializerProvider.
- INSTANCE.getSerializerDeserializer(BuiltinType.ADURATION);
+ private ISerializerDeserializer<ADuration> durationSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADURATION);
private AMutableDuration aDuration = new AMutableDuration(0, 0);
@@ -103,8 +103,8 @@
ATypeTag.SERIALIZED_DURATION_TYPE_TAG);
}
- int yearMonthDurationInMonths = ADurationSerializerDeserializer.getYearMonth(bytes1,
- offset1 + 1);
+ int yearMonthDurationInMonths =
+ ADurationSerializerDeserializer.getYearMonth(bytes1, offset1 + 1);
long dayTimeDurationInMs = ADurationSerializerDeserializer.getDayTime(bytes1, offset1 + 1);
long startingTimePoint = ADateSerializerDeserializer.getChronon(bytes0, offset0 + 1)
@@ -174,8 +174,7 @@
boolean isLeapYear = calInstanct.isLeapYear(year1);
// need to "borrow" the days in previous month to make the day positive; when month is
// 1 (Jan), Dec will be borrowed
- day += isLeapYear
- ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[(12 + month1 - 2) % 12])
+ day += isLeapYear ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[(12 + month1 - 2) % 12])
: (GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[(12 + month1 - 2) % 12]);
month -= 1;
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java
index 904200e..867d030 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CalendarDurationFromDateTimeDescriptor.java
@@ -92,8 +92,8 @@
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADuration> durationSerde = SerializerDeserializerProvider.
- INSTANCE.getSerializerDeserializer(BuiltinType.ADURATION);
+ private ISerializerDeserializer<ADuration> durationSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADURATION);
private AMutableDuration aDuration = new AMutableDuration(0, 0);
@@ -120,8 +120,8 @@
ATypeTag.SERIALIZED_DURATION_TYPE_TAG);
}
- int yearMonthDurationInMonths = ADurationSerializerDeserializer.getYearMonth(bytes1,
- offset1 + 1);
+ int yearMonthDurationInMonths =
+ ADurationSerializerDeserializer.getYearMonth(bytes1, offset1 + 1);
long dayTimeDurationInMs = ADurationSerializerDeserializer.getDayTime(bytes1, offset1 + 1);
long startingTimePoint = ADateTimeSerializerDeserializer.getChronon(bytes0, offset0 + 1);
@@ -188,8 +188,7 @@
boolean isLeapYear = calInstanct.isLeapYear(year1);
// need to "borrow" the days in previous month to make the day positive; when month is
// 1 (Jan), Dec will be borrowed
- day += isLeapYear
- ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[(12 + month1 - 2) % 12])
+ day += isLeapYear ? (GregorianCalendarSystem.DAYS_OF_MONTH_LEAP[(12 + month1 - 2) % 12])
: (GregorianCalendarSystem.DAYS_OF_MONTH_ORDI[(12 + month1 - 2) % 12]);
month -= 1;
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CurrentDateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CurrentDateDescriptor.java
index 2174432..8d4cff5 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CurrentDateDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CurrentDateDescriptor.java
@@ -66,8 +66,8 @@
private DataOutput out = resultStorage.getDataOutput();
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADate> dateSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATE);
+ private ISerializerDeserializer<ADate> dateSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATE);
private AMutableDate aDate = new AMutableDate(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CurrentDateTimeDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CurrentDateTimeDescriptor.java
index 9c284b5..5491c39 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CurrentDateTimeDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CurrentDateTimeDescriptor.java
@@ -67,8 +67,8 @@
private DataOutput out = resultStorage.getDataOutput();
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADateTime> datetimeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATETIME);
+ private ISerializerDeserializer<ADateTime> datetimeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
private AMutableDateTime aDateTime = new AMutableDateTime(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CurrentTimeDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CurrentTimeDescriptor.java
index 3ce0e5a..f9cfbf2 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CurrentTimeDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/CurrentTimeDescriptor.java
@@ -68,8 +68,8 @@
private DataOutput out = resultStorage.getDataOutput();
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ATime> timeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ATIME);
+ private ISerializerDeserializer<ATime> timeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
private AMutableTime aTime = new AMutableTime(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java
index 557591e..cfac2e9 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromDatetimeDescriptor.java
@@ -72,8 +72,8 @@
// possible returning types
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADate> dateSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATE);
+ private ISerializerDeserializer<ADate> dateSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATE);
private AMutableDate aDate = new AMutableDate(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java
index 09c74e0..a365989 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DateFromUnixTimeInDaysDescriptor.java
@@ -70,8 +70,8 @@
// possible returning types
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADate> dateSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATE);
+ private ISerializerDeserializer<ADate> dateSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATE);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java
index 87ef5d2..f70a6e2 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromDateAndTimeDescriptor.java
@@ -77,8 +77,8 @@
// possible returning types
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADateTime> datetimeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATETIME);
+ private ISerializerDeserializer<ADateTime> datetimeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
private AMutableDateTime aDateTime = new AMutableDateTime(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInMsDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInMsDescriptor.java
index d4db02e..87e83d7 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInMsDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInMsDescriptor.java
@@ -78,8 +78,8 @@
// possible output types
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADateTime> datetimeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATETIME);
+ private ISerializerDeserializer<ADateTime> datetimeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
private AMutableDateTime aDatetime = new AMutableDateTime(0);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInSecsDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInSecsDescriptor.java
index 2bbbd5c..abf4016 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInSecsDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DatetimeFromUnixTimeInSecsDescriptor.java
@@ -77,8 +77,8 @@
// possible output types
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADateTime> datetimeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATETIME);
+ private ISerializerDeserializer<ADateTime> datetimeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
private AMutableDateTime aDatetime = new AMutableDateTime(0);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayOfWeekDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayOfWeekDescriptor.java
index 4e93872..6a3e128 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayOfWeekDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayOfWeekDescriptor.java
@@ -75,8 +75,8 @@
// possible returning types
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private AMutableInt64 aInt64 = new AMutableInt64(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayTimeDurationComparatorDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayTimeDurationComparatorDescriptor.java
index dd5c2c5..cb26c58 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayTimeDurationComparatorDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DayTimeDurationComparatorDescriptor.java
@@ -68,8 +68,8 @@
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ABoolean> boolSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ private ISerializerDeserializer<ABoolean> boolSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationEqualDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationEqualDescriptor.java
index 0f6730a..9b29ca8 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationEqualDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationEqualDescriptor.java
@@ -69,8 +69,8 @@
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ABoolean> boolSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ private ISerializerDeserializer<ABoolean> boolSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
@@ -96,10 +96,10 @@
offset0 + 1) == ADurationSerializerDeserializer.getDayTime(bytes1, offset1 + 1))
&& (ADurationSerializerDeserializer.getYearMonth(bytes0,
offset0 + 1) == ADurationSerializerDeserializer.getYearMonth(bytes1,
- offset1 + 1))) {
- boolSerde.serialize(ABoolean.TRUE, out);
+ offset1 + 1))) {
+ boolSerde.serialize(ABoolean.TRUE, out);
} else {
- boolSerde.serialize(ABoolean.FALSE, out);
+ boolSerde.serialize(ABoolean.FALSE, out);
}
result.set(resultStorage);
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromIntervalDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromIntervalDescriptor.java
index f17f330..d8be601 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromIntervalDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromIntervalDescriptor.java
@@ -71,8 +71,8 @@
@SuppressWarnings("unchecked")
private ISerializerDeserializer<ADayTimeDuration> dayTimeDurationSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(
- BuiltinType.ADAYTIMEDURATION);
+ SerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADAYTIMEDURATION);
private AMutableDayTimeDuration aDayTimeDuration = new AMutableDayTimeDuration(0);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromMillisecondsDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromMillisecondsDescriptor.java
index 38eaa1b..f1987ca 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromMillisecondsDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromMillisecondsDescriptor.java
@@ -73,8 +73,8 @@
private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADuration> durationSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADURATION);
+ private ISerializerDeserializer<ADuration> durationSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADURATION);
AMutableDuration aDuration = new AMutableDuration(0, 0);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromMonthsDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromMonthsDescriptor.java
index 6277525..b066c0d 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromMonthsDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/DurationFromMonthsDescriptor.java
@@ -68,8 +68,8 @@
private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADuration> durationSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADURATION);
+ private ISerializerDeserializer<ADuration> durationSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADURATION);
AMutableDuration aDuration = new AMutableDuration(0, 0);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetDayTimeDurationDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetDayTimeDurationDescriptor.java
index 3707f38..9410e6d 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetDayTimeDurationDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetDayTimeDurationDescriptor.java
@@ -70,8 +70,8 @@
@SuppressWarnings("unchecked")
private ISerializerDeserializer<ADayTimeDuration> dayTimeDurationSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(
- BuiltinType.ADAYTIMEDURATION);
+ SerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.ADAYTIMEDURATION);
AMutableDayTimeDuration aDayTimeDuration = new AMutableDayTimeDuration(0);
@@ -88,8 +88,7 @@
ATypeTag.SERIALIZED_DURATION_TYPE_TAG);
}
- aDayTimeDuration
- .setMilliseconds(ADurationSerializerDeserializer.getDayTime(bytes, offset + 1));
+ aDayTimeDuration.setMilliseconds(ADurationSerializerDeserializer.getDayTime(bytes, offset + 1));
dayTimeDurationSerde.serialize(aDayTimeDuration, out);
result.set(resultStorage);
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetOverlappingIntervalDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetOverlappingIntervalDescriptor.java
index d7145ca..4bd3ede 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetOverlappingIntervalDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetOverlappingIntervalDescriptor.java
@@ -65,22 +65,22 @@
protected final IntervalLogic il = new IntervalLogic();
private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
private DataOutput out = resultStorage.getDataOutput();
- private TaggedValuePointable argPtr0 = (TaggedValuePointable) TaggedValuePointable.FACTORY
- .createPointable();
- private TaggedValuePointable argPtr1 = (TaggedValuePointable) TaggedValuePointable.FACTORY
- .createPointable();
- private AIntervalPointable interval0 = (AIntervalPointable) AIntervalPointable.FACTORY
- .createPointable();
- private AIntervalPointable interval1 = (AIntervalPointable) AIntervalPointable.FACTORY
- .createPointable();
+ private TaggedValuePointable argPtr0 =
+ (TaggedValuePointable) TaggedValuePointable.FACTORY.createPointable();
+ private TaggedValuePointable argPtr1 =
+ (TaggedValuePointable) TaggedValuePointable.FACTORY.createPointable();
+ private AIntervalPointable interval0 =
+ (AIntervalPointable) AIntervalPointable.FACTORY.createPointable();
+ private AIntervalPointable interval1 =
+ (AIntervalPointable) AIntervalPointable.FACTORY.createPointable();
private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
private final AMutableInterval aInterval = new AMutableInterval(0, 0, (byte) -1);
@SuppressWarnings("unchecked")
- private final ISerializerDeserializer<ANull> nullSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ANULL);
+ private final ISerializerDeserializer<ANull> nullSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ANULL);
@SuppressWarnings("unchecked")
private final ISerializerDeserializer<AInterval> intervalSerde =
SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINTERVAL);
@@ -93,7 +93,6 @@
byte type0 = argPtr0.getTag();
byte type1 = argPtr1.getTag();
-
if (type0 == ATypeTag.SERIALIZED_INTERVAL_TYPE_TAG && type0 == type1) {
argPtr0.getValue(interval0);
argPtr1.getValue(interval1);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetYearMonthDurationDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetYearMonthDurationDescriptor.java
index 1b033cd..fd58d6c 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetYearMonthDurationDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/GetYearMonthDurationDescriptor.java
@@ -69,8 +69,8 @@
@SuppressWarnings("unchecked")
private ISerializerDeserializer<AYearMonthDuration> yearMonthDurationSerde =
- SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(
- BuiltinType.AYEARMONTHDURATION);
+ SerializerDeserializerProvider.INSTANCE
+ .getSerializerDeserializer(BuiltinType.AYEARMONTHDURATION);
AMutableYearMonthDuration aYearMonthDuration = new AMutableYearMonthDuration(0);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/IntervalBinDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/IntervalBinDescriptor.java
index 3426c28..5694c27 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/IntervalBinDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/IntervalBinDescriptor.java
@@ -198,12 +198,12 @@
binEndChronon = DurationArithmeticOperations.addDuration(chrononToStart,
yearMonth * ((int) binIndex + 1), dayTime * (binIndex + 1), false);
- binStartChronon = binStartChronon / GregorianCalendarSystem.CHRONON_OF_DAY
- + ((binStartChronon < 0
+ binStartChronon =
+ binStartChronon / GregorianCalendarSystem.CHRONON_OF_DAY + ((binStartChronon < 0
&& binStartChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1
: 0);
- binEndChronon = binEndChronon / GregorianCalendarSystem.CHRONON_OF_DAY
- + ((binEndChronon < 0
+ binEndChronon =
+ binEndChronon / GregorianCalendarSystem.CHRONON_OF_DAY + ((binEndChronon < 0
&& binEndChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1
: 0);
break;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MillisecondsFromDayTimeDurationDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MillisecondsFromDayTimeDurationDescriptor.java
index bab73e5..c3bf58d 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MillisecondsFromDayTimeDurationDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MillisecondsFromDayTimeDurationDescriptor.java
@@ -68,8 +68,8 @@
private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
AMutableInt64 aInt64 = new AMutableInt64(0);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MonthsFromYearMonthDurationDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MonthsFromYearMonthDurationDescriptor.java
index 5eed081..3d595c3 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MonthsFromYearMonthDurationDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/MonthsFromYearMonthDurationDescriptor.java
@@ -68,8 +68,8 @@
private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
AMutableInt64 aInt64 = new AMutableInt64(0);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/OverlapBinsDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/OverlapBinsDescriptor.java
index 8e1064f..6980c65 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/OverlapBinsDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/OverlapBinsDescriptor.java
@@ -220,8 +220,8 @@
+ ((binStartChronon < 0
&& binStartChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0)
? -1 : 0);
- binEndChronon = binEndChronon / GregorianCalendarSystem.CHRONON_OF_DAY
- + ((binEndChronon < 0
+ binEndChronon =
+ binEndChronon / GregorianCalendarSystem.CHRONON_OF_DAY + ((binEndChronon < 0
&& binEndChronon % GregorianCalendarSystem.CHRONON_OF_DAY != 0) ? -1
: 0);
aInterval.setValue(binStartChronon, binEndChronon, intervalTypeTag);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateDescriptor.java
index a615b09..e463eed 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateDescriptor.java
@@ -80,8 +80,8 @@
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADate> dateSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATE);
+ private ISerializerDeserializer<ADate> dateSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATE);
private AMutableDate aDate = new AMutableDate(0);
private final UTF8StringPointable utf8Ptr = new UTF8StringPointable();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateTimeDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateTimeDescriptor.java
index c63a4e5..a391529 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateTimeDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseDateTimeDescriptor.java
@@ -75,8 +75,8 @@
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ADateTime> datetimeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ADATETIME);
+ private ISerializerDeserializer<ADateTime> datetimeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADATETIME);
private AMutableDateTime aDateTime = new AMutableDateTime(0);
private final UTF8StringPointable utf8Ptr = new UTF8StringPointable();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseTimeDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseTimeDescriptor.java
index 8fa1d56..948c779 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseTimeDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/ParseTimeDescriptor.java
@@ -76,8 +76,8 @@
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ATime> timeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ATIME);
+ private ISerializerDeserializer<ATime> timeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
private AMutableTime aTime = new AMutableTime(0);
private final UTF8StringPointable utf8Ptr = new UTF8StringPointable();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java
index 6ebed0f..273cba4 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromDatetimeDescriptor.java
@@ -76,8 +76,8 @@
// possible returning types
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ATime> timeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ATIME);
+ private ISerializerDeserializer<ATime> timeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
private AMutableTime aTime = new AMutableTime(0);
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java
index ce4a5c4..a253a89 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/TimeFromUnixTimeInMsDescriptor.java
@@ -70,8 +70,8 @@
// possible output types
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ATime> timeSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ATIME);
+ private ISerializerDeserializer<ATime> timeSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ATIME);
private AMutableTime aTime = new AMutableTime(0);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromDateInDaysDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromDateInDaysDescriptor.java
index 53f521b..3dc449d 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromDateInDaysDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromDateInDaysDescriptor.java
@@ -71,8 +71,8 @@
// possible returning types
private AMutableInt64 aInt64 = new AMutableInt64(0);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromDatetimeInMsDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromDatetimeInMsDescriptor.java
index f5628d4..2dc57c5 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromDatetimeInMsDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromDatetimeInMsDescriptor.java
@@ -69,8 +69,8 @@
// possible returning types
private AMutableInt64 aInt64 = new AMutableInt64(0);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromDatetimeInSecsDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromDatetimeInSecsDescriptor.java
index ade2093..34fedb6 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromDatetimeInSecsDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromDatetimeInSecsDescriptor.java
@@ -75,8 +75,8 @@
// possible returning types
private AMutableInt64 aInt64 = new AMutableInt64(0);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromTimeInMsDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromTimeInMsDescriptor.java
index 382feb0..0909764 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromTimeInMsDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/UnixTimeFromTimeInMsDescriptor.java
@@ -71,8 +71,8 @@
// possible returning types
private AMutableInt64 aInt64 = new AMutableInt64(0);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<AInt64> int64Serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer<AInt64> int64Serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDescriptor.java
index 42809d8..5748956 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/temporal/YearMonthDurationComparatorDescriptor.java
@@ -68,8 +68,8 @@
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
@SuppressWarnings("unchecked")
- private ISerializerDeserializer<ABoolean> boolSerde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.ABOOLEAN);
+ private ISerializerDeserializer<ABoolean> boolSerde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ABOOLEAN);
@Override
public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/utils/RegExpMatcher.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/utils/RegExpMatcher.java
index 9206262..8d238df 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/utils/RegExpMatcher.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/utils/RegExpMatcher.java
@@ -113,8 +113,8 @@
StringEvaluatorUtils.copyResetUTF8Pointable(patternPtr, lastPatternStorage, lastPatternPtr);
// ! object creation !
String inputPatternString = lastPatternPtr.toString();
- String patternString = patternGenerator == null ? inputPatternString : patternGenerator
- .toRegExpPatternString(inputPatternString);
+ String patternString = patternGenerator == null ? inputPatternString
+ : patternGenerator.toRegExpPatternString(inputPatternString);
if (newFlag) {
pattern = Pattern.compile(patternString, StringEvaluatorUtils.toFlag(flagPtr.toString()));
@@ -167,8 +167,8 @@
*/
public String replace(UTF8StringPointable replaceStrPtr) {
// Sets up a new replacement string if necessary.
- final boolean newReplace = replaceStrPtr != null
- && (replaceStr == null || lastReplaceStrPtr.compareTo(replaceStrPtr) != 0);
+ final boolean newReplace =
+ replaceStrPtr != null && (replaceStr == null || lastReplaceStrPtr.compareTo(replaceStrPtr) != 0);
if (newReplace) {
StringEvaluatorUtils.copyResetUTF8Pointable(replaceStrPtr, lastReplaceStorage, lastReplaceStrPtr);
replaceStr = replaceStrPtr.toString();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/visitors/DeepEqualityVisitorHelper.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/visitors/DeepEqualityVisitorHelper.java
index 000425e..cc842e6 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/visitors/DeepEqualityVisitorHelper.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/visitors/DeepEqualityVisitorHelper.java
@@ -32,8 +32,10 @@
public static final int TABLE_SIZE = 100;
public static final int TABLE_FRAME_SIZE = 32768;
- private final ListItemBinaryHashFunctionFactory listItemBinaryHashFunctionFactory = ListItemBinaryHashFunctionFactory.INSTANCE;
- private final ListItemBinaryComparatorFactory listItemBinaryComparatorFactory = ListItemBinaryComparatorFactory.INSTANCE;
+ private final ListItemBinaryHashFunctionFactory listItemBinaryHashFunctionFactory =
+ ListItemBinaryHashFunctionFactory.INSTANCE;
+ private final ListItemBinaryComparatorFactory listItemBinaryComparatorFactory =
+ ListItemBinaryComparatorFactory.INSTANCE;
private final IBinaryHashFunction putHashFunc = listItemBinaryHashFunctionFactory.createBinaryHashFunction();
private final IBinaryHashFunction getHashFunc = listItemBinaryHashFunctionFactory.createBinaryHashFunction();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/visitors/ListDeepEqualityChecker.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/visitors/ListDeepEqualityChecker.java
index bcb41f5..6341b79 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/visitors/ListDeepEqualityChecker.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/visitors/ListDeepEqualityChecker.java
@@ -40,7 +40,8 @@
private BinaryEntry valEntry = new BinaryEntry();
private final DeepEqualityVisitorHelper deepEqualityVisitorHelper = new DeepEqualityVisitorHelper();
- private final Pair<IVisitablePointable, Boolean> itemVisitorArg = new Pair<IVisitablePointable, Boolean>(null, false);
+ private final Pair<IVisitablePointable, Boolean> itemVisitorArg =
+ new Pair<IVisitablePointable, Boolean>(null, false);
public ListDeepEqualityChecker() {
hashMap = deepEqualityVisitorHelper.initializeHashMap(valEntry);
@@ -50,22 +51,22 @@
DeepEqualityVisitor visitor) throws HyracksDataException {
this.visitor = visitor;
- AListVisitablePointable listLeft = (AListVisitablePointable)listPointableLeft;
+ AListVisitablePointable listLeft = (AListVisitablePointable) listPointableLeft;
List<IVisitablePointable> itemsLeft = listLeft.getItems();
List<IVisitablePointable> itemTagTypesLeft = listLeft.getItemTags();
-
- AListVisitablePointable listRight = (AListVisitablePointable)listPointableRight;
+ AListVisitablePointable listRight = (AListVisitablePointable) listPointableRight;
List<IVisitablePointable> itemsRight = listRight.getItems();
List<IVisitablePointable> itemTagTypesRight = listRight.getItemTags();
- if (itemsLeft.size() != itemsRight.size()) return false;
+ if (itemsLeft.size() != itemsRight.size())
+ return false;
boolean isOrderedRight = listLeft.ordered();
if (isOrderedRight != listRight.ordered())
return false;
- if( isOrderedRight) {
+ if (isOrderedRight) {
return processOrderedList(itemsLeft, itemTagTypesLeft, itemsRight, itemTagTypesRight);
} else {
return processUnorderedList(itemsLeft, itemTagTypesLeft, itemsRight, itemTagTypesRight);
@@ -75,9 +76,10 @@
private boolean processOrderedList(List<IVisitablePointable> itemsLeft, List<IVisitablePointable> itemTagTypesLeft,
List<IVisitablePointable> itemsRight, List<IVisitablePointable> itemTagTypesRight)
throws HyracksDataException {
- for(int i=0; i<itemsLeft.size(); i++) {
+ for (int i = 0; i < itemsLeft.size(); i++) {
ATypeTag fieldTypeLeft = PointableHelper.getTypeTag(itemTagTypesLeft.get(i));
- if(fieldTypeLeft.isDerivedType() && fieldTypeLeft != PointableHelper.getTypeTag(itemTagTypesRight.get(i))) {
+ if (fieldTypeLeft.isDerivedType()
+ && fieldTypeLeft != PointableHelper.getTypeTag(itemTagTypesRight.get(i))) {
return false;
}
itemVisitorArg.first = itemsRight.get(i);
@@ -89,13 +91,13 @@
return true;
}
- private boolean processUnorderedList(List<IVisitablePointable> itemsLeft, List<IVisitablePointable> itemTagTypesLeft,
- List<IVisitablePointable> itemsRight, List<IVisitablePointable> itemTagTypesRight)
- throws HyracksDataException {
+ private boolean processUnorderedList(List<IVisitablePointable> itemsLeft,
+ List<IVisitablePointable> itemTagTypesLeft, List<IVisitablePointable> itemsRight,
+ List<IVisitablePointable> itemTagTypesRight) throws HyracksDataException {
hashMap.clear();
// Build phase: Add items into hash map, starting with first list.
- for(int i=0; i<itemsLeft.size(); i++) {
+ for (int i = 0; i < itemsLeft.size(); i++) {
IVisitablePointable item = itemsLeft.get(i);
byte[] buf = item.getByteArray();
int off = item.getStartOffset();
@@ -108,12 +110,11 @@
return probeHashMap(itemsLeft, itemTagTypesLeft, itemsRight, itemTagTypesRight);
}
-
private boolean probeHashMap(List<IVisitablePointable> itemsLeft, List<IVisitablePointable> itemTagTypesLeft,
List<IVisitablePointable> itemsRight, List<IVisitablePointable> itemTagTypesRight)
throws HyracksDataException {
// Probe phase: Probe items from second list
- for(int indexRight=0; indexRight<itemsRight.size(); indexRight++) {
+ for (int indexRight = 0; indexRight < itemsRight.size(); indexRight++) {
IVisitablePointable itemRight = itemsRight.get(indexRight);
byte[] buf = itemRight.getByteArray();
int off = itemRight.getStartOffset();
@@ -128,7 +129,8 @@
int indexLeft = IntegerPointable.getInteger(entry.getBuf(), entry.getOffset());
ATypeTag fieldTypeLeft = PointableHelper.getTypeTag(itemTagTypesLeft.get(indexLeft));
- if(fieldTypeLeft.isDerivedType() && fieldTypeLeft != PointableHelper.getTypeTag(itemTagTypesRight.get(indexRight))) {
+ if (fieldTypeLeft.isDerivedType()
+ && fieldTypeLeft != PointableHelper.getTypeTag(itemTagTypesRight.get(indexRight))) {
return false;
}
@@ -140,4 +142,3 @@
return true;
}
}
-
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/visitors/RecordDeepEqualityChecker.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/visitors/RecordDeepEqualityChecker.java
index 699f2f8..1c033e9 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/visitors/RecordDeepEqualityChecker.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/visitors/RecordDeepEqualityChecker.java
@@ -32,8 +32,8 @@
import org.apache.hyracks.data.std.util.BinaryEntry;
class RecordDeepEqualityChecker {
- private final Pair<IVisitablePointable, Boolean> nestedVisitorArg = new Pair<IVisitablePointable, Boolean>(null,
- false);
+ private final Pair<IVisitablePointable, Boolean> nestedVisitorArg =
+ new Pair<IVisitablePointable, Boolean>(null, false);
private final DeepEqualityVisitorHelper deepEqualityVisitorHelper = new DeepEqualityVisitorHelper();
private DeepEqualityVisitor visitor;
private BinaryEntry keyEntry = new BinaryEntry();
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/exceptions/IncompatibleTypeException.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/exceptions/IncompatibleTypeException.java
index 3e5b311..5feb87f 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/exceptions/IncompatibleTypeException.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/exceptions/IncompatibleTypeException.java
@@ -35,8 +35,7 @@
// Incompatible input parameters, e.g., "1.0" > 1.0
public IncompatibleTypeException(String functionName, byte typeTagLeft, byte typeTagRight) {
- super(ErrorCode.TYPE_INCOMPATIBLE, functionName,
- EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(typeTagLeft),
+ super(ErrorCode.TYPE_INCOMPATIBLE, functionName, EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(typeTagLeft),
EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(typeTagRight));
}
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/exceptions/TypeMismatchException.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/exceptions/TypeMismatchException.java
index c061197..f6cf1c7 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/exceptions/TypeMismatchException.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/exceptions/TypeMismatchException.java
@@ -41,5 +41,4 @@
EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(actualTypeTag));
}
-
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/exceptions/UnsupportedTypeException.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/exceptions/UnsupportedTypeException.java
index af86bd8..29b3819 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/exceptions/UnsupportedTypeException.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/exceptions/UnsupportedTypeException.java
@@ -34,7 +34,6 @@
// Unsupported input type.
public UnsupportedTypeException(String funcName, byte actualTypeTag) {
- super(ErrorCode.TYPE_UNSUPPORTED, funcName,
- EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(actualTypeTag));
+ super(ErrorCode.TYPE_UNSUPPORTED, funcName, EnumDeserializer.ATYPETAGDESERIALIZER.deserialize(actualTypeTag));
}
}
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java
index 0de61ff..7913d48 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/JobEventListenerFactory.java
@@ -60,9 +60,8 @@
@Override
public void updateListenerJobParameters(JobParameterByteStore jobParameterByteStore) {
- String AsterixTransactionIdString =
- new String(jobParameterByteStore.getParameterValue(TRANSACTION_ID_PARAMETER_NAME, 0,
- TRANSACTION_ID_PARAMETER_NAME.length));
+ String AsterixTransactionIdString = new String(jobParameterByteStore
+ .getParameterValue(TRANSACTION_ID_PARAMETER_NAME, 0, TRANSACTION_ID_PARAMETER_NAME.length));
if (AsterixTransactionIdString.length() > 0) {
this.txnId = new TxnId(Integer.parseInt(AsterixTransactionIdString));
}
@@ -75,8 +74,9 @@
@Override
public void jobletFinish(JobStatus jobStatus) {
try {
- ITransactionManager txnManager = ((INcApplicationContext) jobletContext.getServiceContext()
- .getApplicationContext()).getTransactionSubsystem().getTransactionManager();
+ ITransactionManager txnManager =
+ ((INcApplicationContext) jobletContext.getServiceContext().getApplicationContext())
+ .getTransactionSubsystem().getTransactionManager();
ITransactionContext txnContext = txnManager.getTransactionContext(txnId);
txnContext.setWriteTxn(transactionalWrite);
if (jobStatus != JobStatus.FAILURE) {
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/MultiTransactionJobletEventListenerFactory.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/MultiTransactionJobletEventListenerFactory.java
index 656ea09..9b9206c 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/MultiTransactionJobletEventListenerFactory.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/MultiTransactionJobletEventListenerFactory.java
@@ -94,8 +94,7 @@
new TransactionOptions(ITransactionManager.AtomicityLevel.ENTITY_LEVEL);
for (TxnId subTxnId : txnIdMap.values()) {
((INcApplicationContext) jobletContext.getServiceContext().getApplicationContext())
- .getTransactionSubsystem().getTransactionManager()
- .beginTransaction(subTxnId, options);
+ .getTransactionSubsystem().getTransactionManager().beginTransaction(subTxnId, options);
}
} catch (ACIDException e) {
throw new Error(e);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/NodeJobTracker.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/NodeJobTracker.java
index ff009dc..9a42420 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/NodeJobTracker.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/job/listener/NodeJobTracker.java
@@ -70,9 +70,8 @@
@Override
public synchronized Set<JobId> getPendingJobs(String nodeId) {
- return nodeJobs.containsKey(nodeId) ?
- Collections.unmodifiableSet(nodeJobs.get(nodeId)) :
- Collections.emptySet();
+ return nodeJobs.containsKey(nodeId) ? Collections.unmodifiableSet(nodeJobs.get(nodeId))
+ : Collections.emptySet();
}
@Override
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/runningaggregates/std/TidRunningAggregateDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/runningaggregates/std/TidRunningAggregateDescriptor.java
index 207ce85..0a959d6 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/runningaggregates/std/TidRunningAggregateDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/runningaggregates/std/TidRunningAggregateDescriptor.java
@@ -60,8 +60,8 @@
return new IRunningAggregateEvaluator() {
private final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
- private final ISerializerDeserializer<AInt64> serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private final ISerializerDeserializer<AInt64> serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private final AMutableInt64 m = new AMutableInt64(0);
private int cnt;
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/unnestingfunctions/std/RangeDescriptor.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/unnestingfunctions/std/RangeDescriptor.java
index d102d7c..5ad8d3f 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/unnestingfunctions/std/RangeDescriptor.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/unnestingfunctions/std/RangeDescriptor.java
@@ -67,8 +67,8 @@
return new IUnnestingEvaluator() {
private ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
@SuppressWarnings("rawtypes")
- private ISerializerDeserializer serde = SerializerDeserializerProvider.INSTANCE
- .getSerializerDeserializer(BuiltinType.AINT64);
+ private ISerializerDeserializer serde =
+ SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
private IPointable inputVal = new VoidPointable();
private IScalarEvaluator eval0 = args[0].createScalarEvaluator(ctx);
private IScalarEvaluator eval1 = args[1].createScalarEvaluator(ctx);
diff --git a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
index 6994862..c5eeb65 100644
--- a/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
+++ b/asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/utils/ClusterStateManager.java
@@ -277,8 +277,8 @@
clusterActiveLocations.add(p.getActiveNodeId());
}
}
- clusterPartitionConstraint = new AlgebricksAbsolutePartitionConstraint(
- clusterActiveLocations.toArray(new String[] {}));
+ clusterPartitionConstraint =
+ new AlgebricksAbsolutePartitionConstraint(clusterActiveLocations.toArray(new String[] {}));
}
@Override
@@ -443,8 +443,8 @@
}
private void updateNodeConfig(String nodeId, Map<IOption, Object> configuration) {
- ConfigManager configManager = ((ConfigManagerApplicationConfig) appCtx.getServiceContext().getAppConfig())
- .getConfigManager();
+ ConfigManager configManager =
+ ((ConfigManagerApplicationConfig) appCtx.getServiceContext().getAppConfig()).getConfigManager();
configuration.forEach((key, value) -> {
if (key.section() == Section.NC) {
configManager.set(nodeId, key, value);
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/LicensingIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/LicensingIT.java
index a0e9385..6d4af8c 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/LicensingIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/LicensingIT.java
@@ -76,8 +76,8 @@
@Test
public void ensureNoMissingLicenses() throws IOException {
for (String licenseArtifactName : getLicenseArtifactNames()) {
- final File licenseFile = new File(
- FileUtil.joinPath(installerDir, pathToLicensingFiles(), licenseArtifactName));
+ final File licenseFile =
+ new File(FileUtil.joinPath(installerDir, pathToLicensingFiles(), licenseArtifactName));
List<String> badLines = new ArrayList<>();
for (String line : FileUtils.readLines(licenseFile, StandardCharsets.UTF_8)) {
if (line.matches("^\\s*MISSING:.*")) {
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NCServiceExecutionIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NCServiceExecutionIT.java
index 1d3c1c5..d7eea9a 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NCServiceExecutionIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NCServiceExecutionIT.java
@@ -55,8 +55,8 @@
// Important paths and files for this test.
// The "target" subdirectory of asterix-server. All outputs go here.
- public static final String TARGET_DIR = StringUtils.join(new String[] { "../asterix-server/target" },
- File.separator);
+ public static final String TARGET_DIR =
+ StringUtils.join(new String[] { "../asterix-server/target" }, File.separator);
// Directory where the NCs create and store all data, as configured by
// src/test/resources/NCServiceExecutionIT/cc.conf.
@@ -65,12 +65,12 @@
// The log directory, where all CC, NCService, and NC logs are written. CC and
// NCService logs are configured on the HyracksVirtualCluster below. NC logs
// are configured in src/test/resources/NCServiceExecutionIT/ncservice*.conf.
- public static final String LOG_DIR = StringUtils.join(new String[] { TARGET_DIR, "failsafe-reports" },
- File.separator);
+ public static final String LOG_DIR =
+ StringUtils.join(new String[] { TARGET_DIR, "failsafe-reports" }, File.separator);
// Directory where *.conf files are located.
- public static final String CONF_DIR = StringUtils
- .join(new String[] { TARGET_DIR, "test-classes", "NCServiceExecutionIT" }, File.separator);
+ public static final String CONF_DIR =
+ StringUtils.join(new String[] { TARGET_DIR, "test-classes", "NCServiceExecutionIT" }, File.separator);
// The app.home specified for HyracksVirtualCluster. The NCService expects
// to find the NC startup script in ${app.home}/bin.
@@ -84,13 +84,13 @@
// Path to the actual AQL test files, which we borrow from asterix-app. This is
// passed to TestExecutor.
- protected static final String TESTS_DIR = StringUtils
- .join(new String[] { ASTERIX_APP_DIR, "src", "test", "resources", "runtimets" }, File.separator);
+ protected static final String TESTS_DIR =
+ StringUtils.join(new String[] { ASTERIX_APP_DIR, "src", "test", "resources", "runtimets" }, File.separator);
// Path that actual results are written to. We create and clean this directory
// here, and also pass it to TestExecutor which writes the test output there.
- public static final String ACTUAL_RESULTS_DIR = StringUtils.join(new String[] { TARGET_DIR, "ittest" },
- File.separator);
+ public static final String ACTUAL_RESULTS_DIR =
+ StringUtils.join(new String[] { TARGET_DIR, "ittest" }, File.separator);
private static final Logger LOGGER = LogManager.getLogger();
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NcLifecycleIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NcLifecycleIT.java
index f69f849..9e3a029 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NcLifecycleIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/NcLifecycleIT.java
@@ -69,7 +69,8 @@
this.tcCtx = tcCtx;
}
- @Rule public TestRule retainLogs = new RetainLogsRule(NCServiceExecutionIT.ASTERIX_APP_DIR, reportPath, this);
+ @Rule
+ public TestRule retainLogs = new RetainLogsRule(NCServiceExecutionIT.ASTERIX_APP_DIR, reportPath, this);
@Before
public void before() throws Exception {
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/RecoveryIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/RecoveryIT.java
index 7aea282..163e279 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/RecoveryIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/RecoveryIT.java
@@ -67,8 +67,8 @@
File outdir = new File(PATH_ACTUAL);
outdir.mkdirs();
- File externalTestsJar = new File(
- StringUtils.join(new String[] { "..", "asterix-external-data", "target" }, File.separator))
+ File externalTestsJar =
+ new File(StringUtils.join(new String[] { "..", "asterix-external-data", "target" }, File.separator))
.listFiles((dir, name) -> name.matches("asterix-external-data-.*-tests.jar"))[0];
asterixInstallerPath = new File(System.getProperty("user.dir"));
@@ -106,8 +106,8 @@
public static void tearDown() throws Exception {
File outdir = new File(PATH_ACTUAL);
FileUtils.deleteDirectory(outdir);
- File dataCopyDir = new File(
- ncServiceHomePath + File.separator + ".." + File.separator + ".." + File.separator + "data");
+ File dataCopyDir =
+ new File(ncServiceHomePath + File.separator + ".." + File.separator + ".." + File.separator + "data");
FileUtils.deleteDirectory(dataCopyDir);
TestExecutor.executeScript(pb,
scriptHomePath + File.separator + "setup_teardown" + File.separator + "stop_and_delete.sh");
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/ReplicationIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/ReplicationIT.java
index 289dbf2..ad1078d 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/ReplicationIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/ReplicationIT.java
@@ -81,7 +81,8 @@
this.tcCtx = tcCtx;
}
- @Rule public TestRule retainLogs = new RetainLogsRule(NCServiceExecutionIT.ASTERIX_APP_DIR, reportPath, this);
+ @Rule
+ public TestRule retainLogs = new RetainLogsRule(NCServiceExecutionIT.ASTERIX_APP_DIR, reportPath, this);
@Before
public void before() throws Exception {
diff --git a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/SampleLocalClusterIT.java b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/SampleLocalClusterIT.java
index b6cb030..190cead 100644
--- a/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/SampleLocalClusterIT.java
+++ b/asterixdb/asterix-server/src/test/java/org/apache/asterix/server/test/SampleLocalClusterIT.java
@@ -108,8 +108,8 @@
@Test
public void test0_startCluster() throws Exception {
- Process process = new ProcessBuilder(joinPath(LOCAL_SAMPLES_DIR, "bin/stop-sample-cluster.sh"), "-f")
- .inheritIO().start();
+ Process process =
+ new ProcessBuilder(joinPath(LOCAL_SAMPLES_DIR, "bin/stop-sample-cluster.sh"), "-f").inheritIO().start();
Assert.assertEquals(0, process.waitFor());
process = new ProcessBuilder(joinPath(LOCAL_SAMPLES_DIR, "bin/start-sample-cluster.sh")).inheritIO().start();
Assert.assertEquals(0, process.waitFor());
@@ -127,8 +127,8 @@
@Test
public void test2_stopCluster() throws Exception {
- Process process = new ProcessBuilder(joinPath(LOCAL_SAMPLES_DIR, "bin/stop-sample-cluster.sh")).inheritIO()
- .start();
+ Process process =
+ new ProcessBuilder(joinPath(LOCAL_SAMPLES_DIR, "bin/stop-sample-cluster.sh")).inheritIO().start();
Assert.assertEquals(0, process.waitFor());
try {
new URL("http://127.0.0.1:19002").openConnection().connect();
diff --git a/asterixdb/asterix-test-framework/src/main/java/org/apache/asterix/testframework/template/TemplateHelper.java b/asterixdb/asterix-test-framework/src/main/java/org/apache/asterix/testframework/template/TemplateHelper.java
index d2f52dc..bd5400d 100644
--- a/asterixdb/asterix-test-framework/src/main/java/org/apache/asterix/testframework/template/TemplateHelper.java
+++ b/asterixdb/asterix-test-framework/src/main/java/org/apache/asterix/testframework/template/TemplateHelper.java
@@ -59,8 +59,8 @@
}
public File resolveTemplateFile(File inputFile) throws IOException {
- File outputFile = File.createTempFile("template.", "." +
- inputFile.getName().substring(0, inputFile.getName().lastIndexOf(".template")));
+ File outputFile = File.createTempFile("template.",
+ "." + inputFile.getName().substring(0, inputFile.getName().lastIndexOf(".template")));
outputFile.deleteOnExit();
processFile(inputFile, outputFile);
return outputFile;
@@ -71,7 +71,7 @@
outputFile.getParentFile().mkdirs();
}
try (BufferedReader reader = new BufferedReader(new FileReader(inputFile));
- BufferedWriter writer = new BufferedWriter(new FileWriter(outputFile))) {
+ BufferedWriter writer = new BufferedWriter(new FileWriter(outputFile))) {
String line;
while ((line = reader.readLine()) != null) {
Matcher m = replacementPattern.matcher(line);
diff --git a/asterixdb/asterix-test-framework/src/main/java/org/apache/asterix/testframework/xml/TestSuiteParser.java b/asterixdb/asterix-test-framework/src/main/java/org/apache/asterix/testframework/xml/TestSuiteParser.java
index ebd945e..406a762 100644
--- a/asterixdb/asterix-test-framework/src/main/java/org/apache/asterix/testframework/xml/TestSuiteParser.java
+++ b/asterixdb/asterix-test-framework/src/main/java/org/apache/asterix/testframework/xml/TestSuiteParser.java
@@ -42,7 +42,7 @@
JAXBContext ctx = JAXBContext.newInstance(org.apache.asterix.testframework.xml.TestSuite.class);
Unmarshaller um = ctx.createUnmarshaller();
- return (org.apache.asterix.testframework.xml.TestSuite) um.unmarshal(new SAXSource(saxParser.getXMLReader(),
- new InputSource(testSuiteCatalog.toURI().toString())));
+ return (org.apache.asterix.testframework.xml.TestSuite) um.unmarshal(
+ new SAXSource(saxParser.getXMLReader(), new InputSource(testSuiteCatalog.toURI().toString())));
}
}
diff --git a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/CustOrdDataGen.java b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/CustOrdDataGen.java
index b605f73..5127d7b 100644
--- a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/CustOrdDataGen.java
+++ b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/CustOrdDataGen.java
@@ -44,8 +44,8 @@
"Lake St.", "Hill St.", "Park St.", "View St." };
private static final int MIN_STREET_NUM = 1;
private static final int MAX_STREET_NUM = 10000;
- private static final String[] CITIES = { "Seattle", "San Jose", "Mountain View", "Los Angeles", "Sunnyvale",
- "Portland" };
+ private static final String[] CITIES =
+ { "Seattle", "San Jose", "Mountain View", "Los Angeles", "Sunnyvale", "Portland" };
private static final int MIN_INTERESTS = 0;
private static final int MAX_INTERESTS = 5;
@@ -67,13 +67,13 @@
private String[] ORDER_PRIORITIES = { "LOW", "MEDIUM", "HIGH", "PREMIUM" };
private String[] ORDER_STATUSES = { "ORDER_PLACED", "PAYMENT_RECEIVED", "ORDER_SHIPPED", "ORDER_DELIVERED" };
- private String[] firstNames = { "Joe", "John", "Jill", "Gill", "Bill", "William", "Kathy", "Cathey", "Jane",
- "Albert" };
- private String[] lastNames = { "Doe", "Smith", "Li", "Singh", "Williams", "Davis", "Brown", "Wilson", "Moore",
- "Thomas" };
+ private String[] firstNames =
+ { "Joe", "John", "Jill", "Gill", "Bill", "William", "Kathy", "Cathey", "Jane", "Albert" };
+ private String[] lastNames =
+ { "Doe", "Smith", "Li", "Singh", "Williams", "Davis", "Brown", "Wilson", "Moore", "Thomas" };
- private static final String[] UNDECLARED_FIELD_NAMES = { "param1", "param2", "param3", "param4", "param5",
- "param6", "param7", "param8", "param9", "param10" };
+ private static final String[] UNDECLARED_FIELD_NAMES =
+ { "param1", "param2", "param3", "param4", "param5", "param6", "param7", "param8", "param9", "param10" };
private int currentCID = 0;
private int currentOID = 0;
@@ -414,8 +414,8 @@
customersFile.write(customerList.get(ix).getJSON() + "\n");
// generate orders
- int numOrders = Math.abs(rndValue.nextInt()) % (MAX_ORDERS_PER_CUST - MIN_ORDERS_PER_CUST)
- + MIN_ORDERS_PER_CUST;
+ int numOrders =
+ Math.abs(rndValue.nextInt()) % (MAX_ORDERS_PER_CUST - MIN_ORDERS_PER_CUST) + MIN_ORDERS_PER_CUST;
for (int i = 0; i < numOrders; i++) {
ordersBatch[i].generateFieldValues(customerList.get(ix));
ordersList.add(ordersBatch[i]);
diff --git a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/EventDataGen.java b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/EventDataGen.java
index b175b42..9737c93 100644
--- a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/EventDataGen.java
+++ b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/datagen/EventDataGen.java
@@ -49,12 +49,12 @@
"Lake St.", "Hill St.", "Park St.", "View St." };
private static final int MIN_STREET_NUM = 1;
private static final int MAX_STREET_NUM = 10000;
- private static final String[] CITIES = { "Seattle", "Irvine", "Laguna Beach", "Los Angeles", "San Clemente",
- "Huntington Beach", "Portland" };
+ private static final String[] CITIES =
+ { "Seattle", "Irvine", "Laguna Beach", "Los Angeles", "San Clemente", "Huntington Beach", "Portland" };
private static final int MIN_ZIP = 100000;
private static final int MAX_ZIP = 999999;
- private static final String[] LAT_LONGS = { "47,-122", "33,-117", "33,-117", "34,-118", "33,-117", "33,-117",
- "45,-122" };
+ private static final String[] LAT_LONGS =
+ { "47,-122", "33,-117", "33,-117", "34,-118", "33,-117", "33,-117", "45,-122" };
private static final int MIN_MEMBERSHIPS = 1;
private static final int MAX_MEMBERSHIPS = 10;
@@ -87,8 +87,8 @@
firstNameIdx = Math.abs(rndValue.nextInt()) % firstNames.length;
lastNameIdx = Math.abs(rndValue.nextInt()) % lastNames.length;
// name = firstNames[firstNameIx] + " " + lastNames[lastNameIx];
- numInterests = Math.abs((rndValue.nextInt()) % (MAX_USER_INTERESTS - MIN_USER_INTERESTS))
- + MIN_USER_INTERESTS;
+ numInterests =
+ Math.abs((rndValue.nextInt()) % (MAX_USER_INTERESTS - MIN_USER_INTERESTS)) + MIN_USER_INTERESTS;
for (int i = 0; i < numInterests; i++) {
interests[i] = Math.abs(rndValue.nextInt()) % INTERESTS.length;
}
@@ -107,8 +107,8 @@
+ MEMBER_SINCE_MIN_YEAR;
int msMo = Math.abs(rndValue.nextInt()) % 12 + 1;
int msDay = Math.abs(rndValue.nextInt()) % 28 + 1;
- member_since_date[i] = msYear + "-" + (msMo < 10 ? "0" : "") + msMo + "-" + (msDay < 10 ? "0" : "")
- + msDay;
+ member_since_date[i] =
+ msYear + "-" + (msMo < 10 ? "0" : "") + msMo + "-" + (msDay < 10 ? "0" : "") + msDay;
}
}
@@ -218,14 +218,14 @@
public static void main(String[] args) throws IOException {
if (args.length != 2) {
- System.err
- .println("MUST PROVIDE 2 PARAMETERS, 1. output directory path and 2. number of records to generate.");
+ System.err.println(
+ "MUST PROVIDE 2 PARAMETERS, 1. output directory path and 2. number of records to generate.");
System.exit(1);
}
String outputFile = args[0];
int numRecords = Integer.parseInt(args[1]);
- Writer userFile = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputFile + File.separator
- + "user.adm")));
+ Writer userFile = new BufferedWriter(
+ new OutputStreamWriter(new FileOutputStream(outputFile + File.separator + "user.adm")));
EventDataGen dgen = new EventDataGen();
dgen.init();
for (int i = 0; i < numRecords; i++) {
diff --git a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/external/data/DataGeneratorForSpatialIndexEvaluation.java b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/external/data/DataGeneratorForSpatialIndexEvaluation.java
index 60bca549..4f1f3d7 100644
--- a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/external/data/DataGeneratorForSpatialIndexEvaluation.java
+++ b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/external/data/DataGeneratorForSpatialIndexEvaluation.java
@@ -34,7 +34,8 @@
public class DataGeneratorForSpatialIndexEvaluation {
- private static final String DUMMY_SIZE_ADJUSTER = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+ private static final String DUMMY_SIZE_ADJUSTER =
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
private RandomDateGenerator randDateGen;
@@ -209,12 +210,11 @@
: (date.getMonth() + random.nextInt(endDate.getMonth() - date.getMonth()))
: random.nextInt(12) + 1;
- int day = (year == endDate.getYear())
- ? month == endDate.getMonth()
+ int day =
+ (year == endDate.getYear()) ? month == endDate.getMonth()
? date.getDay() == endDate.getDay() ? endDate.getDay()
: date.getDay() + random.nextInt(endDate.getDay() - date.getDay())
- : random.nextInt(28) + 1
- : random.nextInt(28) + 1;
+ : random.nextInt(28) + 1 : random.nextInt(28) + 1;
recentDate.reset(month, day, year);
return recentDate;
}
@@ -887,8 +887,8 @@
}
public static void main(String[] args) throws Exception {
- DataGeneratorForSpatialIndexEvaluation dg = new DataGeneratorForSpatialIndexEvaluation(
- new InitializationInfo());
+ DataGeneratorForSpatialIndexEvaluation dg =
+ new DataGeneratorForSpatialIndexEvaluation(new InitializationInfo());
TweetMessageIterator tmi = dg.new TweetMessageIterator(1, new GULongIDGenerator(0, (byte) 0));
int len = 0;
int count = 0;
diff --git a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/tbltoadm/TblToAdm.java b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/tbltoadm/TblToAdm.java
index 26656f6..b1bf335 100644
--- a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/tbltoadm/TblToAdm.java
+++ b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/tbltoadm/TblToAdm.java
@@ -29,7 +29,8 @@
private static final String SEPARATOR_STRING = "\\|\\s*";
private static final char SEPARATOR_CHAR = '|';
- private static void convertFile(String inputFileName, String outputFileName, String scmFileName) throws IOException {
+ private static void convertFile(String inputFileName, String outputFileName, String scmFileName)
+ throws IOException {
File scmFile = new File(scmFileName);
File inFile = new File(inputFileName);
File outFile = new File(outputFileName);
diff --git a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/test/AdmDataGenTest.java b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/test/AdmDataGenTest.java
index b6d44e0..de77870 100644
--- a/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/test/AdmDataGenTest.java
+++ b/asterixdb/asterix-tools/src/test/java/org/apache/asterix/tools/test/AdmDataGenTest.java
@@ -47,8 +47,8 @@
private static final String EXTENSION_QUERY = "adg";
private static final String FILENAME_IGNORE = "ignore.txt";
private static final String FILENAME_ONLY = "only.txt";
- private static final String PATH_BASE = "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "adgts"
- + SEPARATOR;
+ private static final String PATH_BASE =
+ "src" + SEPARATOR + "test" + SEPARATOR + "resources" + SEPARATOR + "adgts" + SEPARATOR;
private static final String PATH_QUERIES = PATH_BASE + "dgscripts" + SEPARATOR;
private static final String PATH_EXPECTED = PATH_BASE + "results" + SEPARATOR;
private static final String PATH_ACTUAL = "adgtest" + SEPARATOR;
@@ -109,12 +109,12 @@
@Test
public void test() throws Exception {
- String scriptFileShort = scriptFile.getPath().substring(PATH_QUERIES.length())
- .replace(SEPARATOR.charAt(0), '/');
+ String scriptFileShort =
+ scriptFile.getPath().substring(PATH_QUERIES.length()).replace(SEPARATOR.charAt(0), '/');
if (!only.isEmpty()) {
if (!only.contains(scriptFileShort)) {
- LOGGER.info("SKIP TEST: \"" + scriptFile.getPath()
- + "\" \"only.txt\" not empty and not in \"only.txt\".");
+ LOGGER.info(
+ "SKIP TEST: \"" + scriptFile.getPath() + "\" \"only.txt\" not empty and not in \"only.txt\".");
}
Assume.assumeTrue(only.contains(scriptFileShort));
}
@@ -186,8 +186,8 @@
lineActual = readerActual.readLine();
// Assert.assertEquals(null, lineActual);
if (lineActual != null) {
- throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< \n> "
- + lineActual);
+ throw new Exception(
+ "Result for " + scriptFile + " changed at line " + num + ":\n< \n> " + lineActual);
}
} finally {
readerExpected.close();
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallbackFactory.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallbackFactory.java
index 972668a..d9088e1 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallbackFactory.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/opcallbacks/SecondaryIndexSearchOperationCallbackFactory.java
@@ -30,8 +30,8 @@
private static final long serialVersionUID = 1L;
@Override
- public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx, IOperatorNodePushable operatorNodePushable)
- throws HyracksDataException {
+ public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx,
+ IOperatorNodePushable operatorNodePushable) throws HyracksDataException {
return new SecondaryIndexSearchOperationCallback(resourceId);
}
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
index a1d0dd5..ca22a84 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/resource/PersistentLocalResourceRepository.java
@@ -131,8 +131,8 @@
storageRoots = new Path[ioManager.getIODevices().size()];
final List<IODeviceHandle> ioDevices = ioManager.getIODevices();
for (int i = 0; i < ioDevices.size(); i++) {
- storageRoots[i] = Paths.get(ioDevices.get(i).getMount().getAbsolutePath(),
- StorageConstants.STORAGE_ROOT_DIR_NAME);
+ storageRoots[i] =
+ Paths.get(ioDevices.get(i).getMount().getAbsolutePath(), StorageConstants.STORAGE_ROOT_DIR_NAME);
}
createStorageRoots();
resourceCache = CacheBuilder.newBuilder().maximumSize(MAX_CACHED_RESOURCES).build();
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/LockManagerStats.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/LockManagerStats.java
index 4d9c94a5..1acc0ac 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/LockManagerStats.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/LockManagerStats.java
@@ -38,27 +38,39 @@
this.loggingPeriod = loggingPeriod;
}
- final void lock() { lCnt.incrementAndGet(); }
- final void instantLock() { ilCnt.incrementAndGet(); }
- final void tryLock() { tlCnt.incrementAndGet(); }
- final void instantTryLock() { itlCnt.incrementAndGet(); }
- final void unlock() { ulCnt.incrementAndGet(); }
- final void releaseLocks() { rlCnt.incrementAndGet(); }
+ final void lock() {
+ lCnt.incrementAndGet();
+ }
+
+ final void instantLock() {
+ ilCnt.incrementAndGet();
+ }
+
+ final void tryLock() {
+ tlCnt.incrementAndGet();
+ }
+
+ final void instantTryLock() {
+ itlCnt.incrementAndGet();
+ }
+
+ final void unlock() {
+ ulCnt.incrementAndGet();
+ }
+
+ final void releaseLocks() {
+ rlCnt.incrementAndGet();
+ }
final int requestSum() {
- return lCnt.intValue() + ilCnt.intValue() + tlCnt.intValue()
- + itlCnt.intValue() + ulCnt.intValue() + rlCnt.intValue();
+ return lCnt.intValue() + ilCnt.intValue() + tlCnt.intValue() + itlCnt.intValue() + ulCnt.intValue()
+ + rlCnt.intValue();
}
final StringBuilder append(StringBuilder sb) {
- sb.append("{")
- .append(" lock : ").append(lCnt)
- .append(", instantLock : ").append(ilCnt)
- .append(", tryLock : ").append(tlCnt)
- .append(", instantTryLock : ").append(itlCnt)
- .append(", unlock : ").append(ulCnt)
- .append(", releaseLocks : ").append(rlCnt)
- .append(" }");
+ sb.append("{").append(" lock : ").append(lCnt).append(", instantLock : ").append(ilCnt).append(", tryLock : ")
+ .append(tlCnt).append(", instantTryLock : ").append(itlCnt).append(", unlock : ").append(ulCnt)
+ .append(", releaseLocks : ").append(rlCnt).append(" }");
return sb;
}
@@ -68,8 +80,7 @@
}
final void logCounters(final Logger logger, final Level lvl, boolean always) {
- if (logger.isEnabled(lvl)
- && (always || requestSum() % loggingPeriod == 0)) {
+ if (logger.isEnabled(lvl) && (always || requestSum() % loggingPeriod == 0)) {
logger.log(lvl, toString());
}
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ResourceGroup.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ResourceGroup.java
index 436745d..6e59217 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ResourceGroup.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ResourceGroup.java
@@ -89,7 +89,7 @@
}
public String toString() {
- return "{ id : " + hashCode() + ", first : " + TypeUtil.Global.toString(firstResourceIndex.get()) + ", " +
- "waiters : " + (hasWaiters() ? "true" : "false") + " }";
+ return "{ id : " + hashCode() + ", first : " + TypeUtil.Global.toString(firstResourceIndex.get()) + ", "
+ + "waiters : " + (hasWaiters() ? "true" : "false") + " }";
}
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ResourceGroupTable.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ResourceGroupTable.java
index 213ccd9..3759f96 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ResourceGroupTable.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/locking/ResourceGroupTable.java
@@ -43,7 +43,8 @@
ResourceGroup get(int dId, int entityHashValue) {
// TODO ensure good properties of hash function
int h = Math.abs(dId ^ entityHashValue);
- if (h < 0) h = 0;
+ if (h < 0)
+ h = 0;
return table[h % size];
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogBuffer.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogBuffer.java
index 614591b..bc487fe 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogBuffer.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogBuffer.java
@@ -220,8 +220,8 @@
reusableTxnId.setId(logRecord.getTxnId());
reusableDatasetId.setId(logRecord.getDatasetId());
txnCtx = txnSubsystem.getTransactionManager().getTransactionContext(reusableTxnId);
- txnSubsystem.getLockManager()
- .unlock(reusableDatasetId, logRecord.getPKHashValue(), LockMode.ANY, txnCtx);
+ txnSubsystem.getLockManager().unlock(reusableDatasetId, logRecord.getPKHashValue(),
+ LockMode.ANY, txnCtx);
txnCtx.notifyEntityCommitted();
if (txnSubsystem.getTransactionProperties().isCommitProfilerEnabled()) {
txnSubsystem.incrementEntityCommitCount();
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogManager.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogManager.java
index 840a19b..e08bebe 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogManager.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/logging/LogManager.java
@@ -408,8 +408,8 @@
* The log file which contains the checkpointLSN has been reached.
* The oldest log file being accessed by a LogReader has been reached.
*/
- if (id >= checkpointLSNLogFileID || (txnLogFileId2ReaderCount.containsKey(id)
- && txnLogFileId2ReaderCount.get(id) > 0)) {
+ if (id >= checkpointLSNLogFileID
+ || (txnLogFileId2ReaderCount.containsKey(id) && txnLogFileId2ReaderCount.get(id) > 0)) {
break;
}
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/AbstractCheckpointManager.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/AbstractCheckpointManager.java
index 386c1c4..f8ab952 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/AbstractCheckpointManager.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/AbstractCheckpointManager.java
@@ -112,8 +112,7 @@
} catch (ClosedByInterruptException e) {
Thread.currentThread().interrupt();
if (LOGGER.isWarnEnabled()) {
- LOGGER.log(Level.WARN, "Interrupted while reading checkpoint file: " + file.getAbsolutePath(),
- e);
+ LOGGER.log(Level.WARN, "Interrupted while reading checkpoint file: " + file.getAbsolutePath(), e);
}
throw new ACIDException(e);
} catch (IOException e) {
diff --git a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/CheckpointManager.java b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/CheckpointManager.java
index 487bc84..a541bd9 100644
--- a/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/CheckpointManager.java
+++ b/asterixdb/asterix-transactions/src/main/java/org/apache/asterix/transaction/management/service/recovery/CheckpointManager.java
@@ -45,8 +45,8 @@
@Override
public synchronized void doSharpCheckpoint() throws HyracksDataException {
LOGGER.info("Starting sharp checkpoint...");
- final IDatasetLifecycleManager datasetLifecycleManager = txnSubsystem.getApplicationContext()
- .getDatasetLifecycleManager();
+ final IDatasetLifecycleManager datasetLifecycleManager =
+ txnSubsystem.getApplicationContext().getDatasetLifecycleManager();
datasetLifecycleManager.flushAllDatasets();
capture(SHARP_CHECKPOINT_LSN, true);
txnSubsystem.getLogManager().renewLogFiles();
@@ -66,8 +66,8 @@
boolean checkpointSucceeded = minFirstLSN >= checkpointTargetLSN;
if (!checkpointSucceeded) {
// Flush datasets with indexes behind target checkpoint LSN
- IDatasetLifecycleManager datasetLifecycleManager = txnSubsystem.getApplicationContext()
- .getDatasetLifecycleManager();
+ IDatasetLifecycleManager datasetLifecycleManager =
+ txnSubsystem.getApplicationContext().getDatasetLifecycleManager();
datasetLifecycleManager.scheduleAsyncFlushForLaggingDatasets(checkpointTargetLSN);
}
capture(minFirstLSN, false);
diff --git a/asterixdb/pom.xml b/asterixdb/pom.xml
index 5098fff..ec72dec 100644
--- a/asterixdb/pom.xml
+++ b/asterixdb/pom.xml
@@ -282,6 +282,21 @@
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>net.revelc.code.formatter</groupId>
+ <artifactId>formatter-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>${source-format.goal}</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <configFile>${root.dir}/AsterixCodeFormatProfile.xml</configFile>
+ <skipFormatting>${source-format.skip}</skipFormatting>
+ </configuration>
+ </plugin>
</plugins>
<pluginManagement>
<plugins>
@@ -709,31 +724,6 @@
</plugins>
</build>
</profile>
- <profile>
- <id>source-format</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <build>
- <plugins>
- <plugin>
- <groupId>net.revelc.code.formatter</groupId>
- <artifactId>formatter-maven-plugin</artifactId>
- <executions>
- <execution>
- <goals>
- <goal>${source-format.goal}</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <configFile>${root.dir}/AsterixCodeFormatProfile.xml</configFile>
- <skipFormatting>${source-format.skip}</skipFormatting>
- </configuration>
- </plugin>
- </plugins>
- </build>
- </profile>
</profiles>
<modules>
diff --git a/asterixdb/src/main/assembly/source.xml b/asterixdb/src/main/assembly/source.xml
index 6552829..78f4382 100644
--- a/asterixdb/src/main/assembly/source.xml
+++ b/asterixdb/src/main/assembly/source.xml
@@ -29,7 +29,7 @@
<outputDirectory>.</outputDirectory>
<useDefaultExcludes>true</useDefaultExcludes>
<excludes>
- <exclude>**/${project.build.directory}/**</exclude>
+ <exclude>${project.build.directory}/**</exclude>
<exclude>release.properties</exclude>
</excludes>
</fileSet>
diff --git a/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/exceptions/AlgebricksException.java b/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/exceptions/AlgebricksException.java
index 64e328a..d1feb08 100644
--- a/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/exceptions/AlgebricksException.java
+++ b/hyracks-fullstack/algebricks/algebricks-common/src/main/java/org/apache/hyracks/algebricks/common/exceptions/AlgebricksException.java
@@ -108,8 +108,8 @@
@Override
public String getMessage() {
if (msgCache == null) {
- msgCache = new CachedMessage(
- ErrorMessageUtil.formatMessage(component, errorCode, super.getMessage(), params));
+ msgCache =
+ new CachedMessage(ErrorMessageUtil.formatMessage(component, errorCode, super.getMessage(), params));
}
return msgCache.message;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-compiler/src/main/java/org/apache/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialFirstRuleCheckFixpointRuleController.java b/hyracks-fullstack/algebricks/algebricks-compiler/src/main/java/org/apache/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialFirstRuleCheckFixpointRuleController.java
index 7328278..29c178a 100644
--- a/hyracks-fullstack/algebricks/algebricks-compiler/src/main/java/org/apache/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialFirstRuleCheckFixpointRuleController.java
+++ b/hyracks-fullstack/algebricks/algebricks-compiler/src/main/java/org/apache/hyracks/algebricks/compiler/rewriter/rulecontrollers/SequentialFirstRuleCheckFixpointRuleController.java
@@ -59,8 +59,7 @@
if (ruleCollection instanceof List) {
rules = (List<IAlgebraicRewriteRule>) ruleCollection;
} else {
- throw AlgebricksException.create(ErrorCode.RULECOLLECTION_NOT_INSTANCE_OF_LIST,
- this.getClass().getName());
+ throw AlgebricksException.create(ErrorCode.RULECOLLECTION_NOT_INSTANCE_OF_LIST, this.getClass().getName());
}
if (rules.isEmpty()) {
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IHyracksJobBuilder.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IHyracksJobBuilder.java
index 1012fef..663661c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IHyracksJobBuilder.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IHyracksJobBuilder.java
@@ -46,7 +46,8 @@
/**
* inputs are numbered starting from 0
*/
- public void contributeGraphEdge(ILogicalOperator src, int srcOutputIndex, ILogicalOperator dest, int destInputIndex);
+ public void contributeGraphEdge(ILogicalOperator src, int srcOutputIndex, ILogicalOperator dest,
+ int destInputIndex);
public void contributeConnector(ILogicalOperator exchgOp, IConnectorDescriptor conn);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/ILogicalOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/ILogicalOperator.java
index 707a7db..dd7e065 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/ILogicalOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/ILogicalOperator.java
@@ -65,7 +65,7 @@
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context,
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException;
+ throws AlgebricksException;
// variables
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IPhysicalOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IPhysicalOperator.java
index 8c0ab2f..2a92aba 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IPhysicalOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/IPhysicalOperator.java
@@ -53,7 +53,7 @@
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException;
+ throws AlgebricksException;
public void disableJobGenBelowMe();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/LogicalExpressionTag.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/LogicalExpressionTag.java
index 58f4c60..45b7edc 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/LogicalExpressionTag.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/base/LogicalExpressionTag.java
@@ -19,5 +19,7 @@
package org.apache.hyracks.algebricks.core.algebra.base;
public enum LogicalExpressionTag {
- FUNCTION_CALL, VARIABLE, CONSTANT
+ FUNCTION_CALL,
+ VARIABLE,
+ CONSTANT
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/ExpressionRuntimeProvider.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/ExpressionRuntimeProvider.java
index 31726d2..71f7b52 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/ExpressionRuntimeProvider.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/ExpressionRuntimeProvider.java
@@ -44,7 +44,7 @@
@Override
public IAggregateEvaluatorFactory createAggregateFunctionFactory(AggregateFunctionCallExpression expr,
IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
return lejg.createAggregateFunctionFactory(expr, env, inputSchemas, context);
}
@@ -58,14 +58,14 @@
@Override
public IRunningAggregateEvaluatorFactory createRunningAggregateFunctionFactory(StatefulFunctionCallExpression expr,
IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
return lejg.createRunningAggregateFunctionFactory(expr, env, inputSchemas, context);
}
@Override
public IUnnestingEvaluatorFactory createUnnestingFunctionFactory(UnnestingFunctionCallExpression expr,
IVariableTypeEnvironment env, IOperatorSchema[] inputSchemas, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
return lejg.createUnnestingFunctionFactory(expr, env, inputSchemas, context);
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/IMergeAggregationExpressionFactory.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/IMergeAggregationExpressionFactory.java
index 2816477..d022bff 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/IMergeAggregationExpressionFactory.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/expressions/IMergeAggregationExpressionFactory.java
@@ -24,6 +24,6 @@
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
public interface IMergeAggregationExpressionFactory {
- ILogicalExpression createMergeAggregation(LogicalVariable originalAggVariable, ILogicalExpression expr, IOptimizationContext env)
- throws AlgebricksException;
+ ILogicalExpression createMergeAggregation(LogicalVariable originalAggVariable, ILogicalExpression expr,
+ IOptimizationContext env) throws AlgebricksException;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/functions/AlgebricksBuiltinFunctions.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/functions/AlgebricksBuiltinFunctions.java
index 07e4f98..2da7cf3 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/functions/AlgebricksBuiltinFunctions.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/functions/AlgebricksBuiltinFunctions.java
@@ -43,8 +43,8 @@
// booleans
public final static FunctionIdentifier NOT = new FunctionIdentifier(ALGEBRICKS_NS, "not", 1);
- public final static FunctionIdentifier AND = new FunctionIdentifier(ALGEBRICKS_NS, "and",
- FunctionIdentifier.VARARGS);
+ public final static FunctionIdentifier AND =
+ new FunctionIdentifier(ALGEBRICKS_NS, "and", FunctionIdentifier.VARARGS);
public final static FunctionIdentifier OR = new FunctionIdentifier(ALGEBRICKS_NS, "or", FunctionIdentifier.VARARGS);
// numerics
@@ -56,7 +56,8 @@
// nulls
public final static FunctionIdentifier IS_NULL = new FunctionIdentifier(ALGEBRICKS_NS, "is-null", 1);
- private static final Map<FunctionIdentifier, ComparisonKind> comparisonFunctions = new HashMap<FunctionIdentifier, ComparisonKind>();
+ private static final Map<FunctionIdentifier, ComparisonKind> comparisonFunctions =
+ new HashMap<FunctionIdentifier, ComparisonKind>();
static {
comparisonFunctions.put(AlgebricksBuiltinFunctions.EQ, ComparisonKind.EQ);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AbstractScanOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AbstractScanOperator.java
index 5d6f40c..c163c9f 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AbstractScanOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AbstractScanOperator.java
@@ -40,7 +40,6 @@
return variables;
}
-
public void setVariables(List<LogicalVariable> variables) {
this.variables = variables;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java
index c58dd67..b4a59a8 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AggregateOperator.java
@@ -98,13 +98,13 @@
@Override
public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
- IVariableTypeEnvironment env = new NonPropagatingTypeEnvironment(ctx.getExpressionTypeComputer(),
- ctx.getMetadataProvider());
+ IVariableTypeEnvironment env =
+ new NonPropagatingTypeEnvironment(ctx.getExpressionTypeComputer(), ctx.getMetadataProvider());
IVariableTypeEnvironment env2 = ctx.getOutputTypeEnvironment(inputs.get(0).getValue());
int n = variables.size();
for (int i = 0; i < n; i++) {
- Object t = ctx.getExpressionTypeComputer().getType(expressions.get(i).getValue(),
- ctx.getMetadataProvider(), env2);
+ Object t = ctx.getExpressionTypeComputer().getType(expressions.get(i).getValue(), ctx.getMetadataProvider(),
+ env2);
env.setVarType(variables.get(i), t);
}
return env;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java
index ef1760b..861d74c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/AssignOperator.java
@@ -92,8 +92,8 @@
env.setVarType(variables.get(i), ctx.getExpressionTypeComputer().getType(expressions.get(i).getValue(),
ctx.getMetadataProvider(), env));
if (expressions.get(i).getValue().getExpressionTag() == LogicalExpressionTag.VARIABLE) {
- LogicalVariable var = ((VariableReferenceExpression) expressions.get(i).getValue())
- .getVariableReference();
+ LogicalVariable var =
+ ((VariableReferenceExpression) expressions.get(i).getValue()).getVariableReference();
for (List<LogicalVariable> list : env.getCorrelatedMissableVariableLists()) {
if (list.contains(var)) {
list.add(variables.get(i));
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/GroupByOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/GroupByOperator.java
index aa694b8..365d77e 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/GroupByOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/GroupByOperator.java
@@ -296,9 +296,8 @@
// The groupAll flag can only be set if group by columns are empty.
private void checkGroupAll(boolean groupAll) {
if (groupAll && !gByList.isEmpty()) {
- throw new IllegalStateException(
- "Conflicting parameters for GROUP BY: there should be no GROUP BY keys "
- + "when the GROUP ALL flag is set to true");
+ throw new IllegalStateException("Conflicting parameters for GROUP BY: there should be no GROUP BY keys "
+ + "when the GROUP ALL flag is set to true");
}
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IndexInsertDeleteUpsertOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IndexInsertDeleteUpsertOperator.java
index 02765f1..31a1294 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IndexInsertDeleteUpsertOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IndexInsertDeleteUpsertOperator.java
@@ -181,8 +181,8 @@
return prevAdditionalFilteringExpression;
}
- public void
- setBeforeOpAdditionalFilteringExpression(Mutable<ILogicalExpression> prevAdditionalFilteringExpression) {
+ public void setBeforeOpAdditionalFilteringExpression(
+ Mutable<ILogicalExpression> prevAdditionalFilteringExpression) {
this.prevAdditionalFilteringExpression = prevAdditionalFilteringExpression;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IntersectOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IntersectOperator.java
index 0595ff8..8a06ec4 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IntersectOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/IntersectOperator.java
@@ -155,8 +155,8 @@
Object expectedType = expected.getVarType(expectedVariables.get(i));
Object actualType = actual.getVarType(actualVariables.get(i));
if (!expectedType.equals(actualType)) {
- AlgebricksConfig.ALGEBRICKS_LOGGER.warn(
- "Type of two variables are not equal." + expectedVariables.get(i) + " is of type: "
+ AlgebricksConfig.ALGEBRICKS_LOGGER
+ .warn("Type of two variables are not equal." + expectedVariables.get(i) + " is of type: "
+ expectedType + actualVariables.get(i) + " is of type: " + actualType);
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/LeftOuterJoinOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/LeftOuterJoinOperator.java
index 2ae27f1..797c5eb 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/LeftOuterJoinOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/LeftOuterJoinOperator.java
@@ -64,9 +64,9 @@
for (int i = 0; i < n; i++) {
envPointers[i] = new OpRefTypeEnvPointer(inputs.get(i), ctx);
}
- PropagatingTypeEnvironment env = new PropagatingTypeEnvironment(ctx.getExpressionTypeComputer(),
- ctx.getMissableTypeComputer(), ctx.getMetadataProvider(), TypePropagationPolicy.LEFT_OUTER,
- envPointers);
+ PropagatingTypeEnvironment env =
+ new PropagatingTypeEnvironment(ctx.getExpressionTypeComputer(), ctx.getMissableTypeComputer(),
+ ctx.getMetadataProvider(), TypePropagationPolicy.LEFT_OUTER, envPointers);
List<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
VariableUtilities.getLiveVariables(inputs.get(1).getValue(), liveVars); // live variables from outer branch can be null together
env.getCorrelatedMissableVariableLists().add(liveVars);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/MaterializeOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/MaterializeOperator.java
index 6c10d60..9e35885 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/MaterializeOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/MaterializeOperator.java
@@ -43,7 +43,8 @@
}
@Override
- public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform) throws AlgebricksException {
+ public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/RunningAggregateOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/RunningAggregateOperator.java
index c2e244b..ef16613 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/RunningAggregateOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/RunningAggregateOperator.java
@@ -72,10 +72,8 @@
IVariableTypeEnvironment env = createPropagatingAllInputsTypeEnvironment(ctx);
int n = variables.size();
for (int i = 0; i < n; i++) {
- env.setVarType(
- variables.get(i),
- ctx.getExpressionTypeComputer().getType(expressions.get(i).getValue(), ctx.getMetadataProvider(),
- env));
+ env.setVarType(variables.get(i), ctx.getExpressionTypeComputer().getType(expressions.get(i).getValue(),
+ ctx.getMetadataProvider(), env));
}
return env;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/ScriptOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/ScriptOperator.java
index b04b28c..6fb767c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/ScriptOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/ScriptOperator.java
@@ -99,8 +99,8 @@
@Override
public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
- IVariableTypeEnvironment env = new NonPropagatingTypeEnvironment(ctx.getExpressionTypeComputer(),
- ctx.getMetadataProvider());
+ IVariableTypeEnvironment env =
+ new NonPropagatingTypeEnvironment(ctx.getExpressionTypeComputer(), ctx.getMetadataProvider());
for (Pair<LogicalVariable, Object> p : scriptDesc.getVarTypePairs()) {
env.setVarType(p.first, p.second);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/TokenizeOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/TokenizeOperator.java
index c69ead7..d61ad07 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/TokenizeOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/TokenizeOperator.java
@@ -50,12 +50,9 @@
private final List<Object> tokenizeVarTypes;
private List<Mutable<ILogicalExpression>> additionalFilteringExpressions;
- public TokenizeOperator(IDataSourceIndex<?, ?> dataSourceIndex,
- List<Mutable<ILogicalExpression>> primaryKeyExprs,
- List<Mutable<ILogicalExpression>> secondaryKeyExprs,
- List<LogicalVariable> tokenizeVars,
- Mutable<ILogicalExpression> filterExpr, Kind operation,
- boolean bulkload, boolean isPartitioned,
+ public TokenizeOperator(IDataSourceIndex<?, ?> dataSourceIndex, List<Mutable<ILogicalExpression>> primaryKeyExprs,
+ List<Mutable<ILogicalExpression>> secondaryKeyExprs, List<LogicalVariable> tokenizeVars,
+ Mutable<ILogicalExpression> filterExpr, Kind operation, boolean bulkload, boolean isPartitioned,
List<Object> tokenizeVarTypes) {
this.dataSourceIndex = dataSourceIndex;
this.primaryKeyExprs = primaryKeyExprs;
@@ -76,9 +73,7 @@
}
@Override
- public boolean acceptExpressionTransform(
- ILogicalExpressionReferenceTransform visitor)
- throws AlgebricksException {
+ public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform visitor) throws AlgebricksException {
boolean b = false;
for (int i = 0; i < primaryKeyExprs.size(); i++) {
if (visitor.transform(primaryKeyExprs.get(i))) {
@@ -94,8 +89,7 @@
}
@Override
- public <R, T> R accept(ILogicalOperatorVisitor<R, T> visitor, T arg)
- throws AlgebricksException {
+ public <R, T> R accept(ILogicalOperatorVisitor<R, T> visitor, T arg) throws AlgebricksException {
return visitor.visitTokenizeOperator(this, arg);
}
@@ -109,8 +103,8 @@
return new VariablePropagationPolicy() {
@Override
- public void propagateVariables(IOperatorSchema target,
- IOperatorSchema... sources) throws AlgebricksException {
+ public void propagateVariables(IOperatorSchema target, IOperatorSchema... sources)
+ throws AlgebricksException {
target.addAllVariables(sources[0]);
for (LogicalVariable v : tokenizeVars) {
target.addVariable(v);
@@ -126,8 +120,7 @@
}
@Override
- public IVariableTypeEnvironment computeOutputTypeEnvironment(
- ITypingContext ctx) throws AlgebricksException {
+ public IVariableTypeEnvironment computeOutputTypeEnvironment(ITypingContext ctx) throws AlgebricksException {
IVariableTypeEnvironment env = createPropagatingAllInputsTypeEnvironment(ctx);
// If the secondary index is not length-partitioned, create one new
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/UpdateOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/UpdateOperator.java
index b1ca744..3a0068d 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/UpdateOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/UpdateOperator.java
@@ -35,7 +35,8 @@
}
@Override
- public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform) throws AlgebricksException {
+ public boolean acceptExpressionTransform(ILogicalExpressionReferenceTransform transform)
+ throws AlgebricksException {
// TODO Auto-generated method stub
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
index 0c53685..fb1bcec 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/IsomorphismOperatorVisitor.java
@@ -77,7 +77,8 @@
public class IsomorphismOperatorVisitor implements ILogicalOperatorVisitor<Boolean, ILogicalOperator> {
- private final Map<LogicalVariable, LogicalVariable> variableMapping = new HashMap<LogicalVariable, LogicalVariable>();
+ private final Map<LogicalVariable, LogicalVariable> variableMapping =
+ new HashMap<LogicalVariable, LogicalVariable>();
public IsomorphismOperatorVisitor() {
}
@@ -89,9 +90,9 @@
return Boolean.FALSE;
}
AggregateOperator aggOpArg = (AggregateOperator) copyAndSubstituteVar(op, arg);
- boolean isomorphic = VariableUtilities.varListEqualUnordered(
- getPairList(op.getVariables(), op.getExpressions()),
- getPairList(aggOpArg.getVariables(), aggOpArg.getExpressions()));
+ boolean isomorphic =
+ VariableUtilities.varListEqualUnordered(getPairList(op.getVariables(), op.getExpressions()),
+ getPairList(aggOpArg.getVariables(), aggOpArg.getExpressions()));
return isomorphic;
}
@@ -103,9 +104,9 @@
return Boolean.FALSE;
}
RunningAggregateOperator aggOpArg = (RunningAggregateOperator) copyAndSubstituteVar(op, arg);
- boolean isomorphic = VariableUtilities.varListEqualUnordered(
- getPairList(op.getVariables(), op.getExpressions()),
- getPairList(aggOpArg.getVariables(), aggOpArg.getExpressions()));
+ boolean isomorphic =
+ VariableUtilities.varListEqualUnordered(getPairList(op.getVariables(), op.getExpressions()),
+ getPairList(aggOpArg.getVariables(), aggOpArg.getExpressions()));
return isomorphic;
}
@@ -142,8 +143,10 @@
GroupByOperator gbyOpArg = (GroupByOperator) copyAndSubstituteVar(op, arg);
List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> keyListsArg = gbyOpArg.getGroupByList();
- List<Pair<LogicalVariable, ILogicalExpression>> listLeft = new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
- List<Pair<LogicalVariable, ILogicalExpression>> listRight = new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
+ List<Pair<LogicalVariable, ILogicalExpression>> listLeft =
+ new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
+ List<Pair<LogicalVariable, ILogicalExpression>> listRight =
+ new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : keyLists) {
listLeft.add(new Pair<LogicalVariable, ILogicalExpression>(pair.first, pair.second.getValue()));
@@ -249,9 +252,9 @@
return Boolean.FALSE;
}
AssignOperator assignOpArg = (AssignOperator) copyAndSubstituteVar(op, arg);
- boolean isomorphic = VariableUtilities.varListEqualUnordered(
- getPairList(op.getVariables(), op.getExpressions()),
- getPairList(assignOpArg.getVariables(), assignOpArg.getExpressions()));
+ boolean isomorphic =
+ VariableUtilities.varListEqualUnordered(getPairList(op.getVariables(), op.getExpressions()),
+ getPairList(assignOpArg.getVariables(), assignOpArg.getExpressions()));
return isomorphic;
}
@@ -659,7 +662,8 @@
public List<Pair<LogicalVariable, ILogicalExpression>> getPairList(List<LogicalVariable> vars,
List<Mutable<ILogicalExpression>> exprs) throws AlgebricksException {
- List<Pair<LogicalVariable, ILogicalExpression>> list = new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
+ List<Pair<LogicalVariable, ILogicalExpression>> list =
+ new ArrayList<Pair<LogicalVariable, ILogicalExpression>>();
if (vars.size() != exprs.size()) {
throw new AlgebricksException("variable list size does not equal to expression list size ");
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
index a75e7d6..74afdf5 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/LogicalOperatorDeepCopyWithNewVariablesVisitor.java
@@ -203,8 +203,8 @@
private List<Pair<IOrder, Mutable<ILogicalExpression>>> deepCopyOrderExpressionReferencePairList(
List<Pair<IOrder, Mutable<ILogicalExpression>>> list) throws AlgebricksException {
- ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>> listCopy = new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>(
- list.size());
+ ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>> listCopy =
+ new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>(list.size());
for (Pair<IOrder, Mutable<ILogicalExpression>> pair : list) {
listCopy.add(new Pair<OrderOperator.IOrder, Mutable<ILogicalExpression>>(deepCopyOrder(pair.first),
exprDeepCopyVisitor.deepCopyExpressionReference(pair.second)));
@@ -248,8 +248,8 @@
private List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> deepCopyVariableExpressionReferencePairList(
List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> list) throws AlgebricksException {
- List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> listCopy = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>(
- list.size());
+ List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> listCopy =
+ new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>(list.size());
for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : list) {
listCopy.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(deepCopyVariable(pair.first),
exprDeepCopyVisitor.deepCopyExpressionReference(pair.second)));
@@ -317,8 +317,8 @@
@Override
public ILogicalOperator visitDataScanOperator(DataSourceScanOperator op, ILogicalOperator arg)
throws AlgebricksException {
- DataSourceScanOperator opCopy = new DataSourceScanOperator(deepCopyVariableList(op.getVariables()),
- op.getDataSource());
+ DataSourceScanOperator opCopy =
+ new DataSourceScanOperator(deepCopyVariableList(op.getVariables()), op.getDataSource());
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@@ -326,8 +326,8 @@
@Override
public ILogicalOperator visitDistinctOperator(DistinctOperator op, ILogicalOperator arg)
throws AlgebricksException {
- DistinctOperator opCopy = new DistinctOperator(
- exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()));
+ DistinctOperator opCopy =
+ new DistinctOperator(exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()));
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@@ -349,10 +349,10 @@
@Override
public ILogicalOperator visitGroupByOperator(GroupByOperator op, ILogicalOperator arg) throws AlgebricksException {
- List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByListCopy = deepCopyVariableExpressionReferencePairList(
- op.getGroupByList());
- List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorListCopy = deepCopyVariableExpressionReferencePairList(
- op.getDecorList());
+ List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByListCopy =
+ deepCopyVariableExpressionReferencePairList(op.getGroupByList());
+ List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorListCopy =
+ deepCopyVariableExpressionReferencePairList(op.getDecorList());
List<ILogicalPlan> nestedPlansCopy = new ArrayList<ILogicalPlan>();
GroupByOperator opCopy = new GroupByOperator(groupByListCopy, decorListCopy, nestedPlansCopy, op.isGroupAll());
@@ -364,10 +364,10 @@
@Override
public ILogicalOperator visitInnerJoinOperator(InnerJoinOperator op, ILogicalOperator arg)
throws AlgebricksException {
- InnerJoinOperator opCopy = new InnerJoinOperator(
- exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
- deepCopyOperatorReference(op.getInputs().get(0), arg),
- deepCopyOperatorReference(op.getInputs().get(1), arg));
+ InnerJoinOperator opCopy =
+ new InnerJoinOperator(exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
+ deepCopyOperatorReference(op.getInputs().get(0), arg),
+ deepCopyOperatorReference(op.getInputs().get(1), arg));
copyAnnotations(op, opCopy);
opCopy.setExecutionMode(op.getExecutionMode());
return opCopy;
@@ -376,10 +376,10 @@
@Override
public ILogicalOperator visitLeftOuterJoinOperator(LeftOuterJoinOperator op, ILogicalOperator arg)
throws AlgebricksException {
- LeftOuterJoinOperator opCopy = new LeftOuterJoinOperator(
- exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
- deepCopyOperatorReference(op.getInputs().get(0), arg),
- deepCopyOperatorReference(op.getInputs().get(1), arg));
+ LeftOuterJoinOperator opCopy =
+ new LeftOuterJoinOperator(exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
+ deepCopyOperatorReference(op.getInputs().get(0), arg),
+ deepCopyOperatorReference(op.getInputs().get(1), arg));
copyAnnotations(op, opCopy);
opCopy.setExecutionMode(op.getExecutionMode());
return opCopy;
@@ -396,8 +396,8 @@
@Override
public ILogicalOperator visitNestedTupleSourceOperator(NestedTupleSourceOperator op, ILogicalOperator arg)
throws AlgebricksException {
- Mutable<ILogicalOperator> dataSourceReference = arg == null ? op.getDataSourceReference()
- : new MutableObject<>(arg);
+ Mutable<ILogicalOperator> dataSourceReference =
+ arg == null ? op.getDataSourceReference() : new MutableObject<>(arg);
NestedTupleSourceOperator opCopy = new NestedTupleSourceOperator(dataSourceReference);
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
@@ -495,8 +495,8 @@
int index = 0;
for (Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple : op.getVariableMappings()) {
LogicalVariable producedVar = deepCopyVariable(triple.third);
- Triple<LogicalVariable, LogicalVariable, LogicalVariable> copiedTriple = new Triple<>(
- liveVarsInLeftInput.get(index), liveVarsInRightInput.get(index), producedVar);
+ Triple<LogicalVariable, LogicalVariable, LogicalVariable> copiedTriple =
+ new Triple<>(liveVarsInLeftInput.get(index), liveVarsInRightInput.get(index), producedVar);
copiedTriples.add(copiedTriple);
++index;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java
index 7543e5f..600714b 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/OperatorDeepCopyVisitor.java
@@ -369,8 +369,8 @@
return newObjs;
}
- private List<Pair<IOrder, Mutable<ILogicalExpression>>>
- deepCopyOrderAndExpression(List<Pair<IOrder, Mutable<ILogicalExpression>>> ordersAndExprs) {
+ private List<Pair<IOrder, Mutable<ILogicalExpression>>> deepCopyOrderAndExpression(
+ List<Pair<IOrder, Mutable<ILogicalExpression>>> ordersAndExprs) {
List<Pair<IOrder, Mutable<ILogicalExpression>>> newOrdersAndExprs = new ArrayList<>();
for (Pair<IOrder, Mutable<ILogicalExpression>> pair : ordersAndExprs) {
newOrdersAndExprs.add(new Pair<>(pair.first, deepCopyExpressionRef(pair.second)));
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java
index 7221e81..69fe746 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/logical/visitors/VariableUtilities.java
@@ -139,7 +139,7 @@
public static void substituteVariables(ILogicalOperator op,
List<Pair<LogicalVariable, LogicalVariable>> oldVarNewVarMapHistory, ITypingContext ctx)
- throws AlgebricksException {
+ throws AlgebricksException {
for (Pair<LogicalVariable, LogicalVariable> entry : oldVarNewVarMapHistory) {
VariableUtilities.substituteVariables(op, entry.first, entry.second, ctx);
}
@@ -165,8 +165,8 @@
public static void substituteVariables(ILogicalOperator op, LogicalVariable v1, LogicalVariable v2,
boolean goThroughNts, ITypingContext ctx) throws AlgebricksException {
- ILogicalOperatorVisitor<Void, Pair<LogicalVariable, LogicalVariable>> visitor = new SubstituteVariableVisitor(
- goThroughNts, ctx);
+ ILogicalOperatorVisitor<Void, Pair<LogicalVariable, LogicalVariable>> visitor =
+ new SubstituteVariableVisitor(goThroughNts, ctx);
op.accept(visitor, new Pair<LogicalVariable, LogicalVariable>(v1, v2));
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractExchangePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractExchangePOperator.java
index 799a6af..6f8d5e9 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractExchangePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractExchangePOperator.java
@@ -33,8 +33,8 @@
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
throws AlgebricksException {
- Pair<IConnectorDescriptor, TargetConstraint> connPair = createConnectorDescriptor(builder.getJobSpec(), op,
- opSchema, context);
+ Pair<IConnectorDescriptor, TargetConstraint> connPair =
+ createConnectorDescriptor(builder.getJobSpec(), op, opSchema, context);
builder.contributeConnectorWithTargetConstraint(op, connPair.first, connPair.second);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractHashJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractHashJoinPOperator.java
index f6a1bc4..6a81005 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractHashJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractHashJoinPOperator.java
@@ -96,10 +96,10 @@
switch (partitioningType) {
case PAIRWISE:
pp1 = new UnorderedPartitionedProperty(new ListSet<>(keysLeftBranch),
- context.getComputationNodeDomain());
+ context.getComputationNodeDomain());
pp2 = new UnorderedPartitionedProperty(new ListSet<>(keysRightBranch),
- context.getComputationNodeDomain());
- break;
+ context.getComputationNodeDomain());
+ break;
case BROADCAST:
pp1 = new RandomPartitioningProperty(context.getComputationNodeDomain());
pp2 = new BroadcastPartitioningProperty(context.getComputationNodeDomain());
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractJoinPOperator.java
index 4afcbc8..aea9b3e 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractJoinPOperator.java
@@ -25,7 +25,8 @@
public abstract class AbstractJoinPOperator extends AbstractPhysicalOperator {
public enum JoinPartitioningType {
- PAIRWISE, BROADCAST
+ PAIRWISE,
+ BROADCAST
}
protected final JoinKind kind;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java
index 0fb667a..43cde22 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPhysicalOperator.java
@@ -72,8 +72,8 @@
}
protected PhysicalRequirements emptyUnaryRequirements() {
- StructuralPropertiesVector[] req = new StructuralPropertiesVector[] {
- StructuralPropertiesVector.EMPTY_PROPERTIES_VECTOR };
+ StructuralPropertiesVector[] req =
+ new StructuralPropertiesVector[] { StructuralPropertiesVector.EMPTY_PROPERTIES_VECTOR };
return new PhysicalRequirements(req, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
@@ -117,7 +117,7 @@
protected AlgebricksPipeline[] compileSubplans(IOperatorSchema outerPlanSchema,
AbstractOperatorWithNestedPlans npOp, IOperatorSchema opSchema, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
AlgebricksPipeline[] subplans = new AlgebricksPipeline[npOp.getNestedPlans().size()];
PlanCompiler pc = new PlanCompiler(context);
int i = 0;
@@ -129,7 +129,7 @@
private AlgebricksPipeline buildPipelineWithProjection(ILogicalPlan p, IOperatorSchema outerPlanSchema,
AbstractOperatorWithNestedPlans npOp, IOperatorSchema opSchema, PlanCompiler pc)
- throws AlgebricksException {
+ throws AlgebricksException {
if (p.getRoots().size() > 1) {
throw new NotImplementedException("Nested plans with several roots are not supported.");
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPreclusteredGroupByPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPreclusteredGroupByPOperator.java
index 75970ac..64e50ed 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPreclusteredGroupByPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AbstractPreclusteredGroupByPOperator.java
@@ -155,7 +155,6 @@
Set<LogicalVariable> gbvars = new ListSet<>(columnList);
LocalGroupingProperty groupProp = new LocalGroupingProperty(gbvars, new ArrayList<>(columnList));
-
boolean goon = true;
for (ILogicalPlan p : gby.getNestedPlans()) {
// try to propagate secondary order requirements from nested
@@ -232,8 +231,7 @@
tl.add(((VariableReferenceExpression) decorPair.second.getValue()).getVariableReference());
fdList.add(new FunctionalDependency(hd, tl));
}
- if (allOk && PropertiesUtil.matchLocalProperties(localProps, props,
- new HashMap<>(), fdList)) {
+ if (allOk && PropertiesUtil.matchLocalProperties(localProps, props, new HashMap<>(), fdList)) {
localProps = props;
}
}
@@ -242,8 +240,7 @@
IPartitioningProperty pp = null;
AbstractLogicalOperator aop = (AbstractLogicalOperator) op;
if (aop.getExecutionMode() == ExecutionMode.PARTITIONED) {
- pp = new UnorderedPartitionedProperty(new ListSet<>(columnList),
- context.getComputationNodeDomain());
+ pp = new UnorderedPartitionedProperty(new ListSet<>(columnList), context.getComputationNodeDomain());
}
pv[0] = new StructuralPropertiesVector(pp, localProps);
return new PhysicalRequirements(pv, IPartitioningRequirementsCoordinator.NO_COORDINATION);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java
index f16f49a..147d5cc 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AggregatePOperator.java
@@ -60,11 +60,11 @@
AggregateOperator aggOp = (AggregateOperator) op;
ILogicalOperator op2 = op.getInputs().get(0).getValue();
if (aggOp.getExecutionMode() != AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) {
- deliveredProperties = new StructuralPropertiesVector(op2.getDeliveredPhysicalProperties()
- .getPartitioningProperty(), new ArrayList<>());
+ deliveredProperties = new StructuralPropertiesVector(
+ op2.getDeliveredPhysicalProperties().getPartitioningProperty(), new ArrayList<>());
} else {
- deliveredProperties = new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED,
- new ArrayList<>());
+ deliveredProperties =
+ new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED, new ArrayList<>());
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AssignPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AssignPOperator.java
index 5aed63e..995f6e0 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AssignPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/AssignPOperator.java
@@ -88,14 +88,14 @@
// TODO push projections into the operator
int[] projectionList = JobGenHelper.projectAllVariables(opSchema);
- AssignRuntimeFactory runtime = new AssignRuntimeFactory(outColumns, evalFactories, projectionList,
- flushFramesRapidly);
+ AssignRuntimeFactory runtime =
+ new AssignRuntimeFactory(outColumns, evalFactories, projectionList, flushFramesRapidly);
// contribute one Asterix framewriter
RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
if (cardinalityConstraint > 0) {
- AlgebricksCountPartitionConstraint countConstraint = new AlgebricksCountPartitionConstraint(
- cardinalityConstraint);
+ AlgebricksCountPartitionConstraint countConstraint =
+ new AlgebricksCountPartitionConstraint(cardinalityConstraint);
builder.contributeMicroOperator(assign, runtime, recDesc, countConstraint);
} else {
builder.contributeMicroOperator(assign, runtime, recDesc);
@@ -119,7 +119,6 @@
this.cardinalityConstraint = cardinality;
}
-
@Override
public boolean expensiveThanMaterialization() {
return false;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/BulkloadPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/BulkloadPOperator.java
index dda5456..2204637 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/BulkloadPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/BulkloadPOperator.java
@@ -76,8 +76,8 @@
List<LogicalVariable> scanVariables = new ArrayList<>();
scanVariables.addAll(primaryKeys);
scanVariables.add(new LogicalVariable(-1));
- IPhysicalPropertiesVector physicalProps = dataSource.getPropertiesProvider()
- .computePropertiesVector(scanVariables);
+ IPhysicalPropertiesVector physicalProps =
+ dataSource.getPropertiesProvider().computePropertiesVector(scanVariables);
StructuralPropertiesVector spv = new StructuralPropertiesVector(physicalProps.getPartitioningProperty(),
physicalProps.getLocalProperties());
return new PhysicalRequirements(new IPhysicalPropertiesVector[] { spv },
@@ -95,7 +95,7 @@
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException {
+ throws AlgebricksException {
InsertDeleteUpsertOperator insertDeleteOp = (InsertDeleteUpsertOperator) op;
assert insertDeleteOp.getOperation() == Kind.INSERT;
assert insertDeleteOp.isBulkload();
@@ -104,9 +104,9 @@
JobSpecification spec = builder.getJobSpec();
RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
- Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = mp.getInsertRuntime(dataSource,
- propagatedSchema, typeEnv, primaryKeys, payload, additionalFilteringKeys, additionalNonFilterVars,
- inputDesc, context, spec, true);
+ Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints =
+ mp.getInsertRuntime(dataSource, propagatedSchema, typeEnv, primaryKeys, payload,
+ additionalFilteringKeys, additionalNonFilterVars, inputDesc, context, spec, true);
builder.contributeHyracksOperator(insertDeleteOp, runtimeAndConstraints.first);
builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
ILogicalOperator src = insertDeleteOp.getInputs().get(0).getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DataSourceScanPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DataSourceScanPOperator.java
index 4a5ac5a..1421cef 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DataSourceScanPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DataSourceScanPOperator.java
@@ -109,9 +109,9 @@
List<LogicalVariable> vars = scan.getVariables();
List<LogicalVariable> projectVars = scan.getProjectVariables();
- Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = mp.getScannerRuntime(dataSource, vars,
- projectVars, scan.isProjectPushed(), scan.getMinFilterVars(), scan.getMaxFilterVars(), opSchema,
- typeEnv, context, builder.getJobSpec(), implConfig);
+ Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p =
+ mp.getScannerRuntime(dataSource, vars, projectVars, scan.isProjectPushed(), scan.getMinFilterVars(),
+ scan.getMaxFilterVars(), opSchema, typeEnv, context, builder.getJobSpec(), implConfig);
builder.contributeHyracksOperator(scan, p.first);
if (p.second != null) {
builder.contributeAlgebricksPartitionConstraint(p.first, p.second);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java
index b3e8385..178f2a1 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/DistributeResultPOperator.java
@@ -100,11 +100,11 @@
RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
- IPrinterFactory[] pf = JobGenHelper.mkPrinterFactories(inputSchemas[0], context.getTypeEnvironment(op),
- context, columns);
+ IPrinterFactory[] pf =
+ JobGenHelper.mkPrinterFactories(inputSchemas[0], context.getTypeEnvironment(op), context, columns);
- Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = mp.getResultHandleRuntime(
- resultOp.getDataSink(), columns, pf, inputDesc, true, spec);
+ Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints =
+ mp.getResultHandleRuntime(resultOp.getDataSink(), columns, pf, inputDesc, true, spec);
builder.contributeHyracksOperator(resultOp, runtimeAndConstraints.first);
ILogicalOperator src = resultOp.getInputs().get(0).getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ExternalGroupByPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ExternalGroupByPOperator.java
index eecd066..5ee967d 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ExternalGroupByPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ExternalGroupByPOperator.java
@@ -148,7 +148,7 @@
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException {
+ throws AlgebricksException {
List<LogicalVariable> gbyCols = getGbyColumns();
int keys[] = JobGenHelper.variablesToFieldIndexes(gbyCols, inputSchemas[0]);
GroupByOperator gby = (GroupByOperator) op;
@@ -221,20 +221,20 @@
}
IOperatorDescriptorRegistry spec = builder.getJobSpec();
- IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(gbyCols,
- aggOpInputEnv, context);
- RecordDescriptor recordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema,
- context);
- IBinaryHashFunctionFamily[] hashFunctionFactories = JobGenHelper.variablesToBinaryHashFunctionFamilies(gbyCols,
- aggOpInputEnv, context);
+ IBinaryComparatorFactory[] comparatorFactories =
+ JobGenHelper.variablesToAscBinaryComparatorFactories(gbyCols, aggOpInputEnv, context);
+ RecordDescriptor recordDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
+ IBinaryHashFunctionFamily[] hashFunctionFactories =
+ JobGenHelper.variablesToBinaryHashFunctionFamilies(gbyCols, aggOpInputEnv, context);
ISerializedAggregateEvaluatorFactory[] merges = new ISerializedAggregateEvaluatorFactory[n];
List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
IOperatorSchema[] localInputSchemas = new IOperatorSchema[1];
localInputSchemas[0] = new OperatorSchemaImpl();
for (i = 0; i < n; i++) {
- AggregateFunctionCallExpression aggFun = (AggregateFunctionCallExpression) aggOp.getMergeExpressions()
- .get(i).getValue();
+ AggregateFunctionCallExpression aggFun =
+ (AggregateFunctionCallExpression) aggOp.getMergeExpressions().get(i).getValue();
aggFun.getUsedVariables(usedVars);
}
i = 0;
@@ -248,16 +248,16 @@
localInputSchemas[0].addVariable(usedVar);
}
for (i = 0; i < n; i++) {
- AggregateFunctionCallExpression mergeFun = (AggregateFunctionCallExpression) aggOp.getMergeExpressions()
- .get(i).getValue();
+ AggregateFunctionCallExpression mergeFun =
+ (AggregateFunctionCallExpression) aggOp.getMergeExpressions().get(i).getValue();
merges[i] = expressionRuntimeProvider.createSerializableAggregateFunctionFactory(mergeFun, aggOpInputEnv,
localInputSchemas, context);
}
IAggregatorDescriptorFactory aggregatorFactory = new SerializableAggregatorDescriptorFactory(aff);
IAggregatorDescriptorFactory mergeFactory = new SerializableAggregatorDescriptorFactory(merges);
- INormalizedKeyComputerFactory normalizedKeyFactory = JobGenHelper
- .variablesToAscNormalizedKeyComputerFactory(gbyCols, aggOpInputEnv, context);
+ INormalizedKeyComputerFactory normalizedKeyFactory =
+ JobGenHelper.variablesToAscNormalizedKeyComputerFactory(gbyCols, aggOpInputEnv, context);
// Calculates the hash table size (# of unique hash values) based on the budget and a tuple size.
int memoryBudgetInBytes = context.getFrameSize() * frameLimit;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
index 17322b6..c5ce871 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HashPartitionMergeExchangePOperator.java
@@ -82,8 +82,8 @@
@Override
public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
- IPartitioningProperty p = new UnorderedPartitionedProperty(new ListSet<LogicalVariable>(partitionFields),
- domain);
+ IPartitioningProperty p =
+ new UnorderedPartitionedProperty(new ListSet<LogicalVariable>(partitionFields), domain);
AbstractLogicalOperator op2 = (AbstractLogicalOperator) op.getInputs().get(0).getValue();
List<ILocalStructuralProperty> op2Locals = op2.getDeliveredPhysicalProperties().getLocalProperties();
List<ILocalStructuralProperty> locals = new ArrayList<ILocalStructuralProperty>();
@@ -108,8 +108,8 @@
columns.add(new OrderColumn(var, oc.getOrder()));
}
orderProps.add(new LocalOrderProperty(columns));
- StructuralPropertiesVector[] r = new StructuralPropertiesVector[] { new StructuralPropertiesVector(null,
- orderProps) };
+ StructuralPropertiesVector[] r =
+ new StructuralPropertiesVector[] { new StructuralPropertiesVector(null, orderProps) };
return new PhysicalRequirements(r, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
@@ -155,8 +155,8 @@
j++;
}
- IConnectorDescriptor conn = new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields,
- comparatorFactories, nkcf);
+ IConnectorDescriptor conn =
+ new MToNPartitioningMergingConnectorDescriptor(spec, tpcf, sortFields, comparatorFactories, nkcf);
return new Pair<IConnectorDescriptor, TargetConstraint>(conn, null);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
index 83591ee..301b8f1 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/HybridHashJoinPOperator.java
@@ -116,10 +116,10 @@
int[] keysLeft = JobGenHelper.variablesToFieldIndexes(keysLeftBranch, inputSchemas[0]);
int[] keysRight = JobGenHelper.variablesToFieldIndexes(keysRightBranch, inputSchemas[1]);
IVariableTypeEnvironment env = context.getTypeEnvironment(op);
- IBinaryHashFunctionFactory[] hashFunFactories = JobGenHelper
- .variablesToBinaryHashFunctionFactories(keysLeftBranch, env, context);
- IBinaryHashFunctionFamily[] hashFunFamilies = JobGenHelper.variablesToBinaryHashFunctionFamilies(keysLeftBranch,
- env, context);
+ IBinaryHashFunctionFactory[] hashFunFactories =
+ JobGenHelper.variablesToBinaryHashFunctionFactories(keysLeftBranch, env, context);
+ IBinaryHashFunctionFamily[] hashFunFamilies =
+ JobGenHelper.variablesToBinaryHashFunctionFamilies(keysLeftBranch, env, context);
IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keysLeft.length];
int i = 0;
IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
@@ -128,13 +128,13 @@
comparatorFactories[i++] = bcfp.getBinaryComparatorFactory(t, true);
}
- IPredicateEvaluatorFactoryProvider predEvaluatorFactoryProvider = context
- .getPredicateEvaluatorFactoryProvider();
+ IPredicateEvaluatorFactoryProvider predEvaluatorFactoryProvider =
+ context.getPredicateEvaluatorFactoryProvider();
IPredicateEvaluatorFactory predEvaluatorFactory = predEvaluatorFactoryProvider == null ? null
: predEvaluatorFactoryProvider.getPredicateEvaluatorFactory(keysLeft, keysRight);
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- propagatedSchema, context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
IOperatorDescriptorRegistry spec = builder.getJobSpec();
IOperatorDescriptor opDesc;
boolean optimizedHashJoin = true;
@@ -173,8 +173,8 @@
comparatorFactories, recDescriptor, predEvaluatorFactory, false, null);
break;
case LEFT_OUTER:
- IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[inputSchemas[1]
- .getSize()];
+ IMissingWriterFactory[] nonMatchWriterFactories =
+ new IMissingWriterFactory[inputSchemas[1].getSize()];
for (int j = 0; j < nonMatchWriterFactories.length; j++) {
nonMatchWriterFactories[j] = context.getMissingWriterFactory();
}
@@ -207,8 +207,8 @@
predEvaluatorFactory);
break;
case LEFT_OUTER:
- IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[inputSchemas[1]
- .getSize()];
+ IMissingWriterFactory[] nonMatchWriterFactories =
+ new IMissingWriterFactory[inputSchemas[1].getSize()];
for (int j = 0; j < nonMatchWriterFactories.length; j++) {
nonMatchWriterFactories[j] = context.getMissingWriterFactory();
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java
index a1d496d..9c29c53 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryHashJoinPOperator.java
@@ -87,8 +87,8 @@
int[] keysLeft = JobGenHelper.variablesToFieldIndexes(keysLeftBranch, inputSchemas[0]);
int[] keysRight = JobGenHelper.variablesToFieldIndexes(keysRightBranch, inputSchemas[1]);
IVariableTypeEnvironment env = context.getTypeEnvironment(op);
- IBinaryHashFunctionFactory[] hashFunFactories = JobGenHelper
- .variablesToBinaryHashFunctionFactories(keysLeftBranch, env, context);
+ IBinaryHashFunctionFactory[] hashFunFactories =
+ JobGenHelper.variablesToBinaryHashFunctionFactories(keysLeftBranch, env, context);
IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keysLeft.length];
int i = 0;
IBinaryComparatorFactoryProvider bcfp = context.getBinaryComparatorFactoryProvider();
@@ -97,13 +97,13 @@
comparatorFactories[i++] = bcfp.getBinaryComparatorFactory(t, true);
}
- IPredicateEvaluatorFactoryProvider predEvaluatorFactoryProvider = context
- .getPredicateEvaluatorFactoryProvider();
+ IPredicateEvaluatorFactoryProvider predEvaluatorFactoryProvider =
+ context.getPredicateEvaluatorFactoryProvider();
IPredicateEvaluatorFactory predEvaluatorFactory = (predEvaluatorFactoryProvider == null ? null
: predEvaluatorFactoryProvider.getPredicateEvaluatorFactory(keysLeft, keysRight));
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- propagatedSchema, context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
IOperatorDescriptorRegistry spec = builder.getJobSpec();
IOperatorDescriptor opDesc = null;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryStableSortPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryStableSortPOperator.java
index 48461b1..d304421 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryStableSortPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/InMemoryStableSortPOperator.java
@@ -56,7 +56,8 @@
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
throws AlgebricksException {
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
int n = sortColumns.length;
int[] sortFields = new int[n];
IBinaryComparatorFactory[] comps = new IBinaryComparatorFactory[n];
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexBulkloadPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexBulkloadPOperator.java
index 14032a1..fa0fb1a 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexBulkloadPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexBulkloadPOperator.java
@@ -90,8 +90,8 @@
List<LogicalVariable> scanVariables = new ArrayList<>();
scanVariables.addAll(primaryKeys);
scanVariables.add(new LogicalVariable(-1));
- IPhysicalPropertiesVector physicalProps = dataSourceIndex.getDataSource().getPropertiesProvider()
- .computePropertiesVector(scanVariables);
+ IPhysicalPropertiesVector physicalProps =
+ dataSourceIndex.getDataSource().getPropertiesProvider().computePropertiesVector(scanVariables);
List<ILocalStructuralProperty> localProperties = new ArrayList<>();
List<OrderColumn> orderColumns = new ArrayList<OrderColumn>();
// Data needs to be sorted based on the [token, number of token, PK]
@@ -106,8 +106,8 @@
orderColumns.add(new OrderColumn(pkVar, OrderKind.ASC));
}
localProperties.add(new LocalOrderProperty(orderColumns));
- StructuralPropertiesVector spv = new StructuralPropertiesVector(physicalProps.getPartitioningProperty(),
- localProperties);
+ StructuralPropertiesVector spv =
+ new StructuralPropertiesVector(physicalProps.getPartitioningProperty(), localProperties);
return new PhysicalRequirements(new IPhysicalPropertiesVector[] { spv },
IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
@@ -132,9 +132,9 @@
JobSpecification spec = builder.getJobSpec();
RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
- Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = mp.getIndexInsertRuntime(
- dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys, secondaryKeys,
- additionalFilteringKeys, filterExpr, inputDesc, context, spec, true);
+ Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints =
+ mp.getIndexInsertRuntime(dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys,
+ secondaryKeys, additionalFilteringKeys, filterExpr, inputDesc, context, spec, true);
builder.contributeHyracksOperator(indexInsertDeleteOp, runtimeAndConstraints.first);
builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
ILogicalOperator src = indexInsertDeleteOp.getInputs().get(0).getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeleteUpsertPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeleteUpsertPOperator.java
index ce86e58..a66db35 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeleteUpsertPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IndexInsertDeleteUpsertPOperator.java
@@ -96,8 +96,8 @@
for (int i = 0; i < numOfAdditionalNonFilteringFields; i++) {
scanVariables.add(new LogicalVariable(-1));
}
- IPhysicalPropertiesVector r = dataSourceIndex.getDataSource().getPropertiesProvider()
- .computePropertiesVector(scanVariables);
+ IPhysicalPropertiesVector r =
+ dataSourceIndex.getDataSource().getPropertiesProvider().computePropertiesVector(scanVariables);
r.getLocalProperties().clear();
IPhysicalPropertiesVector[] requirements = new IPhysicalPropertiesVector[1];
requirements[0] = r;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IntersectPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IntersectPOperator.java
index 0baffc9..1d36cc0 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IntersectPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/IntersectPOperator.java
@@ -87,8 +87,8 @@
public void computeDeliveredProperties(ILogicalOperator iop, IOptimizationContext context)
throws AlgebricksException {
IntersectOperator op = (IntersectOperator) iop;
- IPartitioningProperty pp = op.getInputs().get(0).getValue().getDeliveredPhysicalProperties()
- .getPartitioningProperty();
+ IPartitioningProperty pp =
+ op.getInputs().get(0).getValue().getDeliveredPhysicalProperties().getPartitioningProperty();
HashMap<LogicalVariable, LogicalVariable> varMaps = new HashMap<>(op.getOutputVars().size());
for (int i = 0; i < op.getOutputVars().size(); i++) {
@@ -114,9 +114,8 @@
int nInput = logicalOp.getNumInput();
int[][] compareFields = new int[nInput][];
- IBinaryComparatorFactory[] comparatorFactories = JobGenHelper
- .variablesToAscBinaryComparatorFactories(logicalOp.getCompareVariables(0),
- context.getTypeEnvironment(op), context);
+ IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(
+ logicalOp.getCompareVariables(0), context.getTypeEnvironment(op), context);
INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
INormalizedKeyComputerFactory nkcf = null;
@@ -147,9 +146,8 @@
IntersectOperatorDescriptor opDescriptor;
try {
- opDescriptor =
- new IntersectOperatorDescriptor(spec, nInput, compareFields, extraFields, nkcf, comparatorFactories,
- recordDescriptor);
+ opDescriptor = new IntersectOperatorDescriptor(spec, nInput, compareFields, extraFields, nkcf,
+ comparatorFactories, recordDescriptor);
} catch (HyracksException e) {
throw new AlgebricksException(e);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MaterializePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MaterializePOperator.java
index c55a4ae..a48e3c2 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MaterializePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MaterializePOperator.java
@@ -70,10 +70,10 @@
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
throws AlgebricksException {
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- propagatedSchema, context);
- MaterializingOperatorDescriptor materializationOpDesc = new MaterializingOperatorDescriptor(
- builder.getJobSpec(), recDescriptor, isSingleActivity);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
+ MaterializingOperatorDescriptor materializationOpDesc =
+ new MaterializingOperatorDescriptor(builder.getJobSpec(), recDescriptor, isSingleActivity);
contributeOpDesc(builder, (AbstractLogicalOperator) op, materializationOpDesc);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MicroPreclusteredGroupByPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MicroPreclusteredGroupByPOperator.java
index 2772ee7..629afa3 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MicroPreclusteredGroupByPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/MicroPreclusteredGroupByPOperator.java
@@ -63,14 +63,15 @@
int fdColumns[] = getFdColumns(gby, inputSchemas[0]);
// compile subplans and set the gby op. schema accordingly
AlgebricksPipeline[] subplans = compileSubplans(inputSchemas[0], gby, opSchema, context);
- IAggregatorDescriptorFactory aggregatorFactory = new NestedPlansAccumulatingAggregatorFactory(subplans, keys,
- fdColumns);
+ IAggregatorDescriptorFactory aggregatorFactory =
+ new NestedPlansAccumulatingAggregatorFactory(subplans, keys, fdColumns);
- IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(
- columnList, env, context);
- RecordDescriptor recordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
- RecordDescriptor inputRecordDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op.getInputs().get(0).getValue()),
- inputSchemas[0], context);
+ IBinaryComparatorFactory[] comparatorFactories =
+ JobGenHelper.variablesToAscBinaryComparatorFactories(columnList, env, context);
+ RecordDescriptor recordDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
+ RecordDescriptor inputRecordDesc = JobGenHelper.mkRecordDescriptor(
+ context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
MicroPreClusteredGroupRuntimeFactory runtime = new MicroPreClusteredGroupRuntimeFactory(keys,
comparatorFactories, aggregatorFactory, inputRecordDesc, recordDescriptor, null);
builder.contributeMicroOperator(gby, runtime, recordDescriptor);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/NestedLoopJoinPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/NestedLoopJoinPOperator.java
index 80ed8fd..4d7bd7e1 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/NestedLoopJoinPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/NestedLoopJoinPOperator.java
@@ -129,15 +129,15 @@
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
throws AlgebricksException {
AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op;
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- propagatedSchema, context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
IOperatorSchema[] conditionInputSchemas = new IOperatorSchema[1];
conditionInputSchemas[0] = propagatedSchema;
IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
IScalarEvaluatorFactory cond = expressionRuntimeProvider.createEvaluatorFactory(join.getCondition().getValue(),
context.getTypeEnvironment(op), conditionInputSchemas, context);
- ITuplePairComparatorFactory comparatorFactory = new TuplePairEvaluatorFactory(cond,
- context.getBinaryBooleanInspectorFactory());
+ ITuplePairComparatorFactory comparatorFactory =
+ new TuplePairEvaluatorFactory(cond, context.getBinaryBooleanInspectorFactory());
IOperatorDescriptorRegistry spec = builder.getJobSpec();
IOperatorDescriptor opDesc = null;
@@ -212,8 +212,8 @@
int innerIndex) throws HyracksDataException {
compositeTupleRef.reset(outerAccessor, outerIndex, innerAccessor, innerIndex);
condEvaluator.evaluate(compositeTupleRef, p);
- boolean result = binaryBooleanInspector.getBooleanValue(p.getByteArray(), p.getStartOffset(),
- p.getLength());
+ boolean result =
+ binaryBooleanInspector.getBooleanValue(p.getByteArray(), p.getStartOffset(), p.getLength());
if (result) {
return 0;
} else {
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/PreclusteredGroupByPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/PreclusteredGroupByPOperator.java
index 78e4795..0e0953c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/PreclusteredGroupByPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/PreclusteredGroupByPOperator.java
@@ -82,10 +82,10 @@
}
IOperatorDescriptorRegistry spec = builder.getJobSpec();
- IBinaryComparatorFactory[] comparatorFactories = JobGenHelper.variablesToAscBinaryComparatorFactories(
- columnList, context.getTypeEnvironment(op), context);
- RecordDescriptor recordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema,
- context);
+ IBinaryComparatorFactory[] comparatorFactories = JobGenHelper
+ .variablesToAscBinaryComparatorFactories(columnList, context.getTypeEnvironment(op), context);
+ RecordDescriptor recordDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
PreclusteredGroupOperatorDescriptor opDesc = new PreclusteredGroupOperatorDescriptor(spec, keys,
comparatorFactories, aggregatorFactory, recordDescriptor, groupAll, framesLimit);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RandomPartitionExchangePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RandomPartitionExchangePOperator.java
index cba8f97..d17c0d9 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RandomPartitionExchangePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RandomPartitionExchangePOperator.java
@@ -50,9 +50,9 @@
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException {
- Pair<IConnectorDescriptor, TargetConstraint> connPair = createConnectorDescriptor(builder.getJobSpec(), op,
- opSchema, context);
+ throws AlgebricksException {
+ Pair<IConnectorDescriptor, TargetConstraint> connPair =
+ createConnectorDescriptor(builder.getJobSpec(), op, opSchema, context);
builder.contributeConnectorWithTargetConstraint(op, connPair.first, connPair.second);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionExchangePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionExchangePOperator.java
index 225ffa0..6630d32 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionExchangePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionExchangePOperator.java
@@ -58,7 +58,8 @@
private INodeDomain domain;
private IRangeMap rangeMap;
- public RangePartitionExchangePOperator(List<OrderColumn> partitioningFields, INodeDomain domain, IRangeMap rangeMap) {
+ public RangePartitionExchangePOperator(List<OrderColumn> partitioningFields, INodeDomain domain,
+ IRangeMap rangeMap) {
this.partitioningFields = partitioningFields;
this.domain = domain;
this.rangeMap = rangeMap;
@@ -79,7 +80,8 @@
@Override
public void computeDeliveredProperties(ILogicalOperator op, IOptimizationContext context) {
- IPartitioningProperty p = new OrderedPartitionedProperty(new ArrayList<OrderColumn>(partitioningFields), domain);
+ IPartitioningProperty p =
+ new OrderedPartitionedProperty(new ArrayList<OrderColumn>(partitioningFields), domain);
this.deliveredProperties = new StructuralPropertiesVector(p, new LinkedList<ILocalStructuralProperty>());
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionMergeExchangePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionMergeExchangePOperator.java
index f56a5dc..ec32a53 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionMergeExchangePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RangePartitionMergeExchangePOperator.java
@@ -63,7 +63,8 @@
private INodeDomain domain;
private IRangeMap rangeMap;
- public RangePartitionMergeExchangePOperator(List<OrderColumn> partitioningFields, INodeDomain domain, IRangeMap rangeMap) {
+ public RangePartitionMergeExchangePOperator(List<OrderColumn> partitioningFields, INodeDomain domain,
+ IRangeMap rangeMap) {
this.partitioningFields = partitioningFields;
this.domain = domain;
this.rangeMap = rangeMap;
@@ -113,8 +114,8 @@
columns.add(new OrderColumn(var, oc.getOrder()));
}
orderProps.add(new LocalOrderProperty(columns));
- StructuralPropertiesVector[] r = new StructuralPropertiesVector[] { new StructuralPropertiesVector(null,
- orderProps) };
+ StructuralPropertiesVector[] r =
+ new StructuralPropertiesVector[] { new StructuralPropertiesVector(null, orderProps) };
return new PhysicalRequirements(r, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java
index 74739da..25d31d2 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/ReplicatePOperator.java
@@ -43,15 +43,15 @@
IOperatorSchema propagatedSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
throws AlgebricksException {
IOperatorDescriptorRegistry spec = builder.getJobSpec();
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- propagatedSchema, context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
ReplicateOperator rop = (ReplicateOperator) op;
int outputArity = rop.getOutputArity();
boolean[] outputMaterializationFlags = rop.getOutputMaterializationFlags();
- ReplicateOperatorDescriptor splitOpDesc = new ReplicateOperatorDescriptor(spec, recDescriptor, outputArity,
- outputMaterializationFlags);
+ ReplicateOperatorDescriptor splitOpDesc =
+ new ReplicateOperatorDescriptor(spec, recDescriptor, outputArity, outputMaterializationFlags);
contributeOpDesc(builder, (AbstractLogicalOperator) op, splitOpDesc);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RunningAggregatePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RunningAggregatePOperator.java
index 8e4ca18..3a6ba74 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RunningAggregatePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/RunningAggregatePOperator.java
@@ -89,8 +89,8 @@
// TODO push projections into the operator
int[] projectionList = JobGenHelper.projectAllVariables(opSchema);
- RunningAggregateRuntimeFactory runtime = new RunningAggregateRuntimeFactory(outColumns, runningAggFuns,
- projectionList);
+ RunningAggregateRuntimeFactory runtime =
+ new RunningAggregateRuntimeFactory(outColumns, runningAggFuns, projectionList);
// contribute one Asterix framewriter
RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkPOperator.java
index d0b7b47..5084c18 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkPOperator.java
@@ -68,7 +68,7 @@
@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
- return emptyUnaryRequirements(op.getInputs().size());
+ return emptyUnaryRequirements(op.getInputs().size());
}
@Override
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java
index 35f9444..f76b69b 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SinkWritePOperator.java
@@ -91,17 +91,18 @@
LogicalVariable v = varRef.getVariableReference();
columns[i++] = inputSchemas[0].findVariable(v);
}
- RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
- RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0],
- context);
+ RecordDescriptor recDesc =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
+ RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
+ context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
- IPrinterFactory[] pf = JobGenHelper.mkPrinterFactories(inputSchemas[0], context.getTypeEnvironment(op),
- context, columns);
+ IPrinterFactory[] pf =
+ JobGenHelper.mkPrinterFactories(inputSchemas[0], context.getTypeEnvironment(op), context, columns);
IMetadataProvider<?, ?> mp = context.getMetadataProvider();
- Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> runtime = mp.getWriteFileRuntime(write.getDataSink(),
- columns, pf, inputDesc);
+ Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> runtime =
+ mp.getWriteFileRuntime(write.getDataSink(), columns, pf, inputDesc);
builder.contributeMicroOperator(write, runtime.first, recDesc, runtime.second);
ILogicalOperator src = write.getInputs().get(0).getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortGroupByPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortGroupByPOperator.java
index c08ff85..1aeeca9 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortGroupByPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortGroupByPOperator.java
@@ -188,8 +188,8 @@
AggregateFunctionCallExpression aggFun = (AggregateFunctionCallExpression) exprRef.getValue();
aff[i++] = expressionRuntimeProvider.createAggregateFunctionFactory(aggFun, aggOpInputEnv, inputSchemas,
context);
- intermediateTypes.add(partialAggregationTypeComputer.getType(aggFun, aggOpInputEnv,
- context.getMetadataProvider()));
+ intermediateTypes
+ .add(partialAggregationTypeComputer.getType(aggFun, aggOpInputEnv, context.getMetadataProvider()));
}
int[] keyAndDecFields = new int[keys.length + fdColumns.length];
@@ -227,16 +227,16 @@
}
i++;
}
- RecordDescriptor recordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema,
- context);
+ RecordDescriptor recordDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
IAggregateEvaluatorFactory[] merges = new IAggregateEvaluatorFactory[n];
List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
IOperatorSchema[] localInputSchemas = new IOperatorSchema[1];
localInputSchemas[0] = new OperatorSchemaImpl();
for (i = 0; i < n; i++) {
- AggregateFunctionCallExpression aggFun = (AggregateFunctionCallExpression) aggOp.getMergeExpressions()
- .get(i).getValue();
+ AggregateFunctionCallExpression aggFun =
+ (AggregateFunctionCallExpression) aggOp.getMergeExpressions().get(i).getValue();
aggFun.getUsedVariables(usedVars);
}
i = 0;
@@ -250,18 +250,18 @@
localInputSchemas[0].addVariable(usedVar);
}
for (i = 0; i < n; i++) {
- AggregateFunctionCallExpression mergeFun = (AggregateFunctionCallExpression) aggOp.getMergeExpressions()
- .get(i).getValue();
+ AggregateFunctionCallExpression mergeFun =
+ (AggregateFunctionCallExpression) aggOp.getMergeExpressions().get(i).getValue();
merges[i] = expressionRuntimeProvider.createAggregateFunctionFactory(mergeFun, aggOpInputEnv,
localInputSchemas, context);
}
- RecordDescriptor partialAggRecordDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- localInputSchemas[0], context);
+ RecordDescriptor partialAggRecordDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), localInputSchemas[0], context);
- IAggregatorDescriptorFactory aggregatorFactory = new SimpleAlgebricksAccumulatingAggregatorFactory(aff,
- keyAndDecFields);
- IAggregatorDescriptorFactory mergeFactory = new SimpleAlgebricksAccumulatingAggregatorFactory(merges,
- keyAndDecFields);
+ IAggregatorDescriptorFactory aggregatorFactory =
+ new SimpleAlgebricksAccumulatingAggregatorFactory(aff, keyAndDecFields);
+ IAggregatorDescriptorFactory mergeFactory =
+ new SimpleAlgebricksAccumulatingAggregatorFactory(merges, keyAndDecFields);
INormalizedKeyComputerFactory normalizedKeyFactory = null;
INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
@@ -269,9 +269,9 @@
normalizedKeyFactory = null;
}
Object type = aggOpInputEnv.getVarType(gbyCols.get(0));
- normalizedKeyFactory = orderColumns[0].getOrder() == OrderKind.ASC ? nkcfProvider
- .getNormalizedKeyComputerFactory(type, true) : nkcfProvider
- .getNormalizedKeyComputerFactory(type, false);
+ normalizedKeyFactory =
+ orderColumns[0].getOrder() == OrderKind.ASC ? nkcfProvider.getNormalizedKeyComputerFactory(type, true)
+ : nkcfProvider.getNormalizedKeyComputerFactory(type, false);
SortGroupByOperatorDescriptor gbyOpDesc = new SortGroupByOperatorDescriptor(spec, frameLimit, keys,
keyAndDecFields, normalizedKeyFactory, compFactories, aggregatorFactory, mergeFactory,
partialAggRecordDescriptor, recordDescriptor, false);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
index 81f6e6b..6c02dca 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SortMergeExchangePOperator.java
@@ -124,8 +124,8 @@
IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
List<ILocalStructuralProperty> localProps = new ArrayList<ILocalStructuralProperty>(sortColumns.length);
localProps.add(new LocalOrderProperty(Arrays.asList(sortColumns)));
- StructuralPropertiesVector[] r = new StructuralPropertiesVector[] { new StructuralPropertiesVector(null,
- localProps) };
+ StructuralPropertiesVector[] r =
+ new StructuralPropertiesVector[] { new StructuralPropertiesVector(null, localProps) };
return new PhysicalRequirements(r, IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SplitPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SplitPOperator.java
index 923e56a..c9fde4b 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SplitPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/SplitPOperator.java
@@ -51,8 +51,8 @@
boolean propageToAllBranchAsDefault = sop.getPropageToAllBranchAsDefault();
IOperatorDescriptorRegistry spec = builder.getJobSpec();
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op),
- propagatedSchema, context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
IScalarEvaluatorFactory brachingExprEvalFactory = expressionRuntimeProvider.createEvaluatorFactory(
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StableSortPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StableSortPOperator.java
index 120c1c4..3a4249b 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StableSortPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StableSortPOperator.java
@@ -72,10 +72,10 @@
@Override
public void contributeRuntimeOperator(IHyracksJobBuilder builder, JobGenContext context, ILogicalOperator op,
IOperatorSchema opSchema, IOperatorSchema[] inputSchemas, IOperatorSchema outerPlanSchema)
- throws AlgebricksException {
+ throws AlgebricksException {
IOperatorDescriptorRegistry spec = builder.getJobSpec();
- RecordDescriptor recDescriptor = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema,
- context);
+ RecordDescriptor recDescriptor =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
int n = sortColumns.length;
int[] sortFields = new int[n];
IBinaryComparatorFactory[] comps = new IBinaryComparatorFactory[n];
@@ -100,8 +100,8 @@
// topK == -1 means that a topK value is not provided.
if (topK == -1) {
- ExternalSortOperatorDescriptor sortOpDesc = new ExternalSortOperatorDescriptor(spec, maxNumberOfFrames,
- sortFields, nkcf, comps, recDescriptor);
+ ExternalSortOperatorDescriptor sortOpDesc =
+ new ExternalSortOperatorDescriptor(spec, maxNumberOfFrames, sortFields, nkcf, comps, recDescriptor);
contributeOpDesc(builder, (AbstractLogicalOperator) op, sortOpDesc);
ILogicalOperator src = op.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, op, 0);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java
index 99be356..da75da8 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamLimitPOperator.java
@@ -62,8 +62,8 @@
ILogicalOperator op2 = op.getInputs().get(0).getValue();
if (limitOp.getExecutionMode() == AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) {
//partitioning property: unpartitioned; local property: whatever from the child
- deliveredProperties = new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED, op2
- .getDeliveredPhysicalProperties().getLocalProperties());
+ deliveredProperties = new StructuralPropertiesVector(IPartitioningProperty.UNPARTITIONED,
+ op2.getDeliveredPhysicalProperties().getLocalProperties());
} else {
deliveredProperties = op2.getDeliveredPhysicalProperties().clone();
}
@@ -89,13 +89,13 @@
LimitOperator limit = (LimitOperator) op;
IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
IVariableTypeEnvironment env = context.getTypeEnvironment(op);
- IScalarEvaluatorFactory maxObjectsFact = expressionRuntimeProvider.createEvaluatorFactory(limit.getMaxObjects()
- .getValue(), env, inputSchemas, context);
+ IScalarEvaluatorFactory maxObjectsFact = expressionRuntimeProvider
+ .createEvaluatorFactory(limit.getMaxObjects().getValue(), env, inputSchemas, context);
ILogicalExpression offsetExpr = limit.getOffset().getValue();
- IScalarEvaluatorFactory offsetFact = (offsetExpr == null) ? null : expressionRuntimeProvider
- .createEvaluatorFactory(offsetExpr, env, inputSchemas, context);
- RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema,
- context);
+ IScalarEvaluatorFactory offsetFact = (offsetExpr == null) ? null
+ : expressionRuntimeProvider.createEvaluatorFactory(offsetExpr, env, inputSchemas, context);
+ RecordDescriptor recDesc =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
StreamLimitRuntimeFactory runtime = new StreamLimitRuntimeFactory(maxObjectsFact, offsetFact, null,
context.getBinaryIntegerInspectorFactory());
builder.contributeMicroOperator(limit, runtime, recDesc);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java
index 184cbbc..3ff7dc1 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamProjectPOperator.java
@@ -68,8 +68,8 @@
projectionList[i++] = pos;
}
StreamProjectRuntimeFactory runtime = new StreamProjectRuntimeFactory(projectionList, flushFramesRapidly);
- RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema,
- context);
+ RecordDescriptor recDesc =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
builder.contributeMicroOperator(project, runtime, recDesc);
ILogicalOperator src = project.getInputs().get(0).getValue();
builder.contributeGraphEdge(src, 0, project, 0);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamSelectPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamSelectPOperator.java
index e87f3f6..ddde5f3 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamSelectPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StreamSelectPOperator.java
@@ -66,10 +66,10 @@
IExpressionRuntimeProvider expressionRuntimeProvider = context.getExpressionRuntimeProvider();
IScalarEvaluatorFactory cond = expressionRuntimeProvider.createEvaluatorFactory(
select.getCondition().getValue(), context.getTypeEnvironment(op), inputSchemas, context);
- StreamSelectRuntimeFactory runtime = new StreamSelectRuntimeFactory(cond, null,
- context.getBinaryBooleanInspectorFactory(), select.getRetainMissing(),
- inputSchemas[0].findVariable(select.getMissingPlaceholderVariable()),
- context.getMissingWriterFactory());
+ StreamSelectRuntimeFactory runtime =
+ new StreamSelectRuntimeFactory(cond, null, context.getBinaryBooleanInspectorFactory(),
+ select.getRetainMissing(), inputSchemas[0].findVariable(select.getMissingPlaceholderVariable()),
+ context.getMissingWriterFactory());
// contribute one Asterix framewriter
RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), opSchema, context);
builder.contributeMicroOperator(select, runtime, recDesc);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StringStreamingScriptPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StringStreamingScriptPOperator.java
index 1f5159d..01e9a0c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StringStreamingScriptPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/StringStreamingScriptPOperator.java
@@ -65,8 +65,8 @@
StringStreamingScriptDescription sssd = (StringStreamingScriptDescription) scriptDesc;
StringStreamingRuntimeFactory runtime = new StringStreamingRuntimeFactory(sssd.getCommand(),
sssd.getPrinterFactories(), sssd.getFieldDelimiter(), sssd.getParserFactory());
- RecordDescriptor recDesc = JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema,
- context);
+ RecordDescriptor recDesc =
+ JobGenHelper.mkRecordDescriptor(context.getTypeEnvironment(op), propagatedSchema, context);
builder.contributeMicroOperator(scriptOp, runtime, recDesc);
// and contribute one edge from its child
ILogicalOperator src = scriptOp.getInputs().get(0).getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/TokenizePOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/TokenizePOperator.java
index 557a657..cd696bc 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/TokenizePOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/TokenizePOperator.java
@@ -91,9 +91,9 @@
JobSpecification spec = builder.getJobSpec();
RecordDescriptor inputDesc = JobGenHelper.mkRecordDescriptor(
context.getTypeEnvironment(op.getInputs().get(0).getValue()), inputSchemas[0], context);
- Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints = mp.getTokenizerRuntime(
- dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys, secondaryKeys, null, inputDesc,
- context, spec, true);
+ Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> runtimeAndConstraints =
+ mp.getTokenizerRuntime(dataSourceIndex, propagatedSchema, inputSchemas, typeEnv, primaryKeys,
+ secondaryKeys, null, inputDesc, context, spec, true);
builder.contributeHyracksOperator(tokenizeOp, runtimeAndConstraints.first);
builder.contributeAlgebricksPartitionConstraint(runtimeAndConstraints.first, runtimeAndConstraints.second);
ILogicalOperator src = tokenizeOp.getInputs().get(0).getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/UnionAllPOperator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/UnionAllPOperator.java
index d184161..a617064 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/UnionAllPOperator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/operators/physical/UnionAllPOperator.java
@@ -62,12 +62,12 @@
@Override
public PhysicalRequirements getRequiredPropertiesForChildren(ILogicalOperator op,
IPhysicalPropertiesVector reqdByParent, IOptimizationContext context) {
- StructuralPropertiesVector pv0 = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op,
- new StructuralPropertiesVector(new RandomPartitioningProperty(context.getComputationNodeDomain()),
- null));
- StructuralPropertiesVector pv1 = OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op,
- new StructuralPropertiesVector(new RandomPartitioningProperty(context.getComputationNodeDomain()),
- null));
+ StructuralPropertiesVector pv0 =
+ OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op, new StructuralPropertiesVector(
+ new RandomPartitioningProperty(context.getComputationNodeDomain()), null));
+ StructuralPropertiesVector pv1 =
+ OperatorPropertiesUtil.checkUnpartitionedAndGetPropertiesVector(op, new StructuralPropertiesVector(
+ new RandomPartitioningProperty(context.getComputationNodeDomain()), null));
return new PhysicalRequirements(new StructuralPropertiesVector[] { pv0, pv1 },
IPartitioningRequirementsCoordinator.NO_COORDINATION);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/AlgebricksAppendable.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/AlgebricksAppendable.java
index 7002493..f8929e0 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/AlgebricksAppendable.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/AlgebricksAppendable.java
@@ -37,7 +37,8 @@
return app;
}
- @Override public String toString() {
+ @Override
+ public String toString() {
return app.toString();
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalExpressionPrettyPrintVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalExpressionPrettyPrintVisitor.java
index 8318176..72f891a 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalExpressionPrettyPrintVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalExpressionPrettyPrintVisitor.java
@@ -27,48 +27,40 @@
import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import org.apache.hyracks.algebricks.core.algebra.visitors.ILogicalExpressionVisitor;
-
public class LogicalExpressionPrettyPrintVisitor implements ILogicalExpressionVisitor<String, Integer> {
@Override
- public String visitConstantExpression(ConstantExpression expr, Integer indent)
+ public String visitConstantExpression(ConstantExpression expr, Integer indent) throws AlgebricksException {
+ return expr.toString();
+ }
+
+ @Override
+ public String visitVariableReferenceExpression(VariableReferenceExpression expr, Integer indent)
throws AlgebricksException {
return expr.toString();
}
@Override
- public String visitVariableReferenceExpression(
- VariableReferenceExpression expr, Integer indent)
+ public String visitAggregateFunctionCallExpression(AggregateFunctionCallExpression expr, Integer indent)
throws AlgebricksException {
return expr.toString();
}
@Override
- public String visitAggregateFunctionCallExpression(
- AggregateFunctionCallExpression expr, Integer indent)
+ public String visitScalarFunctionCallExpression(ScalarFunctionCallExpression expr, Integer indent)
throws AlgebricksException {
return expr.toString();
}
@Override
- public String visitScalarFunctionCallExpression(
- ScalarFunctionCallExpression expr, Integer indent)
+ public String visitStatefulFunctionCallExpression(StatefulFunctionCallExpression expr, Integer indent)
throws AlgebricksException {
return expr.toString();
}
@Override
- public String visitStatefulFunctionCallExpression(
- StatefulFunctionCallExpression expr, Integer indent)
- throws AlgebricksException {
- return expr.toString();
- }
-
- @Override
- public String visitUnnestingFunctionCallExpression(
- UnnestingFunctionCallExpression expr, Integer indent)
+ public String visitUnnestingFunctionCallExpression(UnnestingFunctionCallExpression expr, Integer indent)
throws AlgebricksException {
return expr.toString();
}
}
-
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
index 3e75c66..0ad3fea 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/prettyprint/LogicalOperatorPrettyPrintVisitor.java
@@ -394,8 +394,7 @@
if (op.getOperation() == Kind.UPSERT) {
buffer.append(" out: ([record-before-upsert:" + op.getBeforeOpRecordVar()
+ ((op.getBeforeOpAdditionalNonFilteringVars() != null)
- ? (", additional-before-upsert: " + op.getBeforeOpAdditionalNonFilteringVars())
- : "")
+ ? (", additional-before-upsert: " + op.getBeforeOpAdditionalNonFilteringVars()) : "")
+ "]) ");
}
if (op.isBulkload()) {
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/IPartitioningRequirementsCoordinator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/IPartitioningRequirementsCoordinator.java
index f4f5d7f..d515fcf 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/IPartitioningRequirementsCoordinator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/IPartitioningRequirementsCoordinator.java
@@ -46,55 +46,57 @@
}
};
- public static IPartitioningRequirementsCoordinator EQCLASS_PARTITIONING_COORDINATOR = new IPartitioningRequirementsCoordinator() {
+ public static IPartitioningRequirementsCoordinator EQCLASS_PARTITIONING_COORDINATOR =
+ new IPartitioningRequirementsCoordinator() {
- @Override
- public Pair<Boolean, IPartitioningProperty> coordinateRequirements(IPartitioningProperty rqdpp,
- IPartitioningProperty firstDeliveredPartitioning, ILogicalOperator op, IOptimizationContext context)
- throws AlgebricksException {
- if (firstDeliveredPartitioning != null && rqdpp != null
- && firstDeliveredPartitioning.getPartitioningType() == rqdpp.getPartitioningType()) {
- switch (rqdpp.getPartitioningType()) {
- case UNORDERED_PARTITIONED: {
- UnorderedPartitionedProperty upp1 = (UnorderedPartitionedProperty) firstDeliveredPartitioning;
- Set<LogicalVariable> set1 = upp1.getColumnSet();
- UnorderedPartitionedProperty uppreq = (UnorderedPartitionedProperty) rqdpp;
- Set<LogicalVariable> modifuppreq = new ListSet<LogicalVariable>();
- Map<LogicalVariable, EquivalenceClass> eqmap = context.getEquivalenceClassMap(op);
- Set<LogicalVariable> covered = new ListSet<LogicalVariable>();
+ @Override
+ public Pair<Boolean, IPartitioningProperty> coordinateRequirements(IPartitioningProperty rqdpp,
+ IPartitioningProperty firstDeliveredPartitioning, ILogicalOperator op,
+ IOptimizationContext context) throws AlgebricksException {
+ if (firstDeliveredPartitioning != null && rqdpp != null
+ && firstDeliveredPartitioning.getPartitioningType() == rqdpp.getPartitioningType()) {
+ switch (rqdpp.getPartitioningType()) {
+ case UNORDERED_PARTITIONED: {
+ UnorderedPartitionedProperty upp1 =
+ (UnorderedPartitionedProperty) firstDeliveredPartitioning;
+ Set<LogicalVariable> set1 = upp1.getColumnSet();
+ UnorderedPartitionedProperty uppreq = (UnorderedPartitionedProperty) rqdpp;
+ Set<LogicalVariable> modifuppreq = new ListSet<LogicalVariable>();
+ Map<LogicalVariable, EquivalenceClass> eqmap = context.getEquivalenceClassMap(op);
+ Set<LogicalVariable> covered = new ListSet<LogicalVariable>();
- // coordinate from an existing partition property
- // (firstDeliveredPartitioning)
- for (LogicalVariable v : set1) {
- EquivalenceClass ecFirst = eqmap.get(v);
- for (LogicalVariable r : uppreq.getColumnSet()) {
- EquivalenceClass ec = eqmap.get(r);
- if (ecFirst == ec) {
- covered.add(v);
- modifuppreq.add(r);
- break;
+ // coordinate from an existing partition property
+ // (firstDeliveredPartitioning)
+ for (LogicalVariable v : set1) {
+ EquivalenceClass ecFirst = eqmap.get(v);
+ for (LogicalVariable r : uppreq.getColumnSet()) {
+ EquivalenceClass ec = eqmap.get(r);
+ if (ecFirst == ec) {
+ covered.add(v);
+ modifuppreq.add(r);
+ break;
+ }
+ }
}
+
+ if (!covered.equals(set1)) {
+ throw new AlgebricksException("Could not modify " + rqdpp
+ + " to agree with partitioning property " + firstDeliveredPartitioning
+ + " delivered by previous input operator.");
+ }
+ UnorderedPartitionedProperty upp2 =
+ new UnorderedPartitionedProperty(modifuppreq, rqdpp.getNodeDomain());
+ return new Pair<Boolean, IPartitioningProperty>(false, upp2);
+ }
+ case ORDERED_PARTITIONED: {
+ throw new NotImplementedException();
}
}
-
- if (!covered.equals(set1)) {
- throw new AlgebricksException("Could not modify " + rqdpp
- + " to agree with partitioning property " + firstDeliveredPartitioning
- + " delivered by previous input operator.");
- }
- UnorderedPartitionedProperty upp2 = new UnorderedPartitionedProperty(modifuppreq,
- rqdpp.getNodeDomain());
- return new Pair<Boolean, IPartitioningProperty>(false, upp2);
}
- case ORDERED_PARTITIONED: {
- throw new NotImplementedException();
- }
+ return new Pair<Boolean, IPartitioningProperty>(true, rqdpp);
}
- }
- return new Pair<Boolean, IPartitioningProperty>(true, rqdpp);
- }
- };
+ };
public Pair<Boolean, IPartitioningProperty> coordinateRequirements(IPartitioningProperty requirements,
IPartitioningProperty firstDeliveredPartitioning, ILogicalOperator op, IOptimizationContext context)
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/LocalGroupingProperty.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/LocalGroupingProperty.java
index af91a3a..1cd7e64 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/LocalGroupingProperty.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/LocalGroupingProperty.java
@@ -114,8 +114,8 @@
return null;
}
int numColumns = newColumns.size();
- List<LogicalVariable> newOrderEnforcer = preferredOrderEnforcer.size() > numColumns ? preferredOrderEnforcer
- .subList(0, numColumns) : preferredOrderEnforcer;
+ List<LogicalVariable> newOrderEnforcer = preferredOrderEnforcer.size() > numColumns
+ ? preferredOrderEnforcer.subList(0, numColumns) : preferredOrderEnforcer;
return new LocalGroupingProperty(newColumns, newOrderEnforcer);
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/StructuralPropertiesVector.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/StructuralPropertiesVector.java
index c6b4618..aa6afdb 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/StructuralPropertiesVector.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/StructuralPropertiesVector.java
@@ -86,8 +86,8 @@
if (reqdPart != null) {
IPartitioningProperty normalizedReqPart =
reqdPart.normalize(equivalenceClasses, mayExpandProperties ? fds : null);
- IPartitioningProperty normalizedPropPart = propPartitioning.normalize(equivalenceClasses,
- mayExpandProperties ? fds : null);
+ IPartitioningProperty normalizedPropPart =
+ propPartitioning.normalize(equivalenceClasses, mayExpandProperties ? fds : null);
if (!PropertiesUtil.matchPartitioningProps(normalizedReqPart, normalizedPropPart, mayExpandProperties)) {
diffPart = reqdPart;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/TypePropagationPolicy.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/TypePropagationPolicy.java
index 566c13e..9d60370 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/TypePropagationPolicy.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/properties/TypePropagationPolicy.java
@@ -71,8 +71,8 @@
// found in both inner and outer branches. Fix computeOutputTypeEnvironment() in ProjectOperator
// and investigate why many test queries fail if only live variables' types are propagated.
for (int i = n - 1; i >= 0; i--) {
- Object t = typeEnvs[i].getTypeEnv().getVarType(var, nonNullVariableList,
- correlatedNullableVariableLists);
+ Object t =
+ typeEnvs[i].getTypeEnv().getVarType(var, nonNullVariableList, correlatedNullableVariableLists);
if (t == null) {
continue;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java
index 8d00696..249e66f 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorManipulationUtil.java
@@ -40,8 +40,7 @@
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
-import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.
- LogicalOperatorDeepCopyWithNewVariablesVisitor;
+import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.LogicalOperatorDeepCopyWithNewVariablesVisitor;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.OperatorDeepCopyVisitor;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
@@ -102,8 +101,8 @@
}
case NESTEDTUPLESOURCE: {
NestedTupleSourceOperator nts = (NestedTupleSourceOperator) op;
- AbstractLogicalOperator prevOp = (AbstractLogicalOperator) nts.getDataSourceReference().getValue()
- .getInputs().get(0).getValue();
+ AbstractLogicalOperator prevOp =
+ (AbstractLogicalOperator) nts.getDataSourceReference().getValue().getInputs().get(0).getValue();
if (prevOp.getExecutionMode() != AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) {
nts.setExecutionMode(AbstractLogicalOperator.ExecutionMode.LOCAL);
}
@@ -169,8 +168,8 @@
if (op.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE && goThroughNts) {
NestedTupleSourceOperator nts = (NestedTupleSourceOperator) op;
if (nts.getDataSourceReference() != null) {
- AbstractLogicalOperator op2 = (AbstractLogicalOperator) nts.getDataSourceReference().getValue()
- .getInputs().get(0).getValue();
+ AbstractLogicalOperator op2 =
+ (AbstractLogicalOperator) nts.getDataSourceReference().getValue().getInputs().get(0).getValue();
substituteVarRec(op2, v1, v2, goThroughNts, ctx);
}
}
@@ -202,8 +201,8 @@
public static Pair<ILogicalOperator, Map<LogicalVariable, LogicalVariable>> deepCopyWithNewVars(
ILogicalOperator root, IOptimizationContext ctx) throws AlgebricksException {
- LogicalOperatorDeepCopyWithNewVariablesVisitor deepCopyVisitor = new
- LogicalOperatorDeepCopyWithNewVariablesVisitor(ctx, null, true);
+ LogicalOperatorDeepCopyWithNewVariablesVisitor deepCopyVisitor =
+ new LogicalOperatorDeepCopyWithNewVariablesVisitor(ctx, null, true);
ILogicalOperator newRoot = deepCopyVisitor.deepCopy(root);
return Pair.of(newRoot, deepCopyVisitor.getInputToOutputVariableMapping());
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorPropertiesUtil.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorPropertiesUtil.java
index 9dd1ade..e0d806d 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorPropertiesUtil.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/util/OperatorPropertiesUtil.java
@@ -345,9 +345,8 @@
StructuralPropertiesVector partitionedPropertiesVector) {
ILogicalOperator leftChild = op.getInputs().get(0).getValue();
ILogicalOperator rightChild = op.getInputs().get(1).getValue();
- boolean unPartitioned =
- leftChild.getExecutionMode().equals(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED) && rightChild
- .getExecutionMode().equals(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
+ boolean unPartitioned = leftChild.getExecutionMode().equals(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED)
+ && rightChild.getExecutionMode().equals(AbstractLogicalOperator.ExecutionMode.UNPARTITIONED);
return unPartitioned ? StructuralPropertiesVector.EMPTY_PROPERTIES_VECTOR : partitionedPropertiesVector;
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/visitors/AbstractConstVarFunVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/visitors/AbstractConstVarFunVisitor.java
index 2fa4672..695630c 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/visitors/AbstractConstVarFunVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/algebra/visitors/AbstractConstVarFunVisitor.java
@@ -39,7 +39,8 @@
return visitFunctionCallExpression(expr, arg);
}
- public R visitStatefulFunctionCallExpression(StatefulFunctionCallExpression expr, T arg) throws AlgebricksException {
+ public R visitStatefulFunctionCallExpression(StatefulFunctionCallExpression expr, T arg)
+ throws AlgebricksException {
return visitFunctionCallExpression(expr, arg);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobBuilder.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobBuilder.java
index 4c42db8..13eef09 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobBuilder.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobBuilder.java
@@ -59,7 +59,8 @@
private final Map<ILogicalOperator, AlgebricksPartitionConstraint> pcForMicroOps = new HashMap<>();
private final Map<ILogicalOperator, Integer> algebraicOpBelongingToMetaAsterixOp = new HashMap<>();
- private final Map<Integer, List<Pair<IPushRuntimeFactory, RecordDescriptor>>> metaAsterixOpSkeletons = new HashMap<>();
+ private final Map<Integer, List<Pair<IPushRuntimeFactory, RecordDescriptor>>> metaAsterixOpSkeletons =
+ new HashMap<>();
private final Map<Integer, AlgebricksMetaOperatorDescriptor> metaAsterixOps = new HashMap<>();
private final Map<IOperatorDescriptor, AlgebricksPartitionConstraint> partitionConstraintMap = new HashMap<>();
@@ -205,8 +206,8 @@
if (opInputs != null) {
for (IConnectorDescriptor conn : opInputs) {
ConnectorDescriptorId cid = conn.getConnectorId();
- org.apache.commons.lang3.tuple.Pair<org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>, org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>> p = jobSpec
- .getConnectorOperatorMap().get(cid);
+ org.apache.commons.lang3.tuple.Pair<org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>, org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>> p =
+ jobSpec.getConnectorOperatorMap().get(cid);
IOperatorDescriptor src = p.getLeft().getLeft();
TargetConstraint constraint = tgtConstraints.get(conn);
if (constraint != null) {
@@ -236,8 +237,8 @@
if (opInputs != null) {
for (IConnectorDescriptor conn : opInputs) {
ConnectorDescriptorId cid = conn.getConnectorId();
- org.apache.commons.lang3.tuple.Pair<org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>, org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>> p = jobSpec
- .getConnectorOperatorMap().get(cid);
+ org.apache.commons.lang3.tuple.Pair<org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>, org.apache.commons.lang3.tuple.Pair<IOperatorDescriptor, Integer>> p =
+ jobSpec.getConnectorOperatorMap().get(cid);
IOperatorDescriptor src = p.getLeft().getLeft();
// Pre-order DFS
setPartitionConstraintsBottomup(src.getOperatorId(), tgtConstraints, opDesc, finalPass);
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
index 947bac1..b204bcb 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/JobGenHelper.java
@@ -96,7 +96,7 @@
public static IBinaryHashFunctionFactory[] variablesToBinaryHashFunctionFactories(
Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
IBinaryHashFunctionFactory[] funFactories = new IBinaryHashFunctionFactory[varLogical.size()];
int i = 0;
IBinaryHashFunctionFactoryProvider bhffProvider = context.getBinaryHashFunctionFactoryProvider();
@@ -109,7 +109,7 @@
public static IBinaryHashFunctionFamily[] variablesToBinaryHashFunctionFamilies(
Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
IBinaryHashFunctionFamily[] funFamilies = new IBinaryHashFunctionFamily[varLogical.size()];
int i = 0;
IBinaryHashFunctionFamilyProvider bhffProvider = context.getBinaryHashFunctionFamilyProvider();
@@ -122,7 +122,7 @@
public static IBinaryComparatorFactory[] variablesToAscBinaryComparatorFactories(
Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
IBinaryComparatorFactory[] compFactories = new IBinaryComparatorFactory[varLogical.size()];
IBinaryComparatorFactoryProvider bcfProvider = context.getBinaryComparatorFactoryProvider();
int i = 0;
@@ -146,7 +146,7 @@
public static INormalizedKeyComputerFactory variablesToAscNormalizedKeyComputerFactory(
Collection<LogicalVariable> varLogical, IVariableTypeEnvironment env, JobGenContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
INormalizedKeyComputerFactoryProvider nkcfProvider = context.getNormalizedKeyComputerFactoryProvider();
if (nkcfProvider == null)
return null;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
index f817cd6..7409247 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/jobgen/impl/PlanCompiler.java
@@ -37,7 +37,8 @@
public class PlanCompiler {
private JobGenContext context;
- private Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> operatorVisitedToParents = new HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>>();
+ private Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> operatorVisitedToParents =
+ new HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>>();
public PlanCompiler(JobGenContext context) {
this.context = context;
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java
index 0a4b298..4388032 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/rewriter/base/HeuristicOptimizer.java
@@ -37,13 +37,13 @@
public class HeuristicOptimizer {
- public static PhysicalOperatorTag[] hyracksOperators = new PhysicalOperatorTag[] {
- PhysicalOperatorTag.DATASOURCE_SCAN, PhysicalOperatorTag.BTREE_SEARCH,
- PhysicalOperatorTag.EXTERNAL_GROUP_BY, PhysicalOperatorTag.HASH_GROUP_BY, PhysicalOperatorTag.HDFS_READER,
- PhysicalOperatorTag.HYBRID_HASH_JOIN, PhysicalOperatorTag.IN_MEMORY_HASH_JOIN,
- PhysicalOperatorTag.NESTED_LOOP, PhysicalOperatorTag.PRE_SORTED_DISTINCT_BY,
- PhysicalOperatorTag.PRE_CLUSTERED_GROUP_BY, PhysicalOperatorTag.REPLICATE, PhysicalOperatorTag.STABLE_SORT,
- PhysicalOperatorTag.UNION_ALL };
+ public static PhysicalOperatorTag[] hyracksOperators =
+ new PhysicalOperatorTag[] { PhysicalOperatorTag.DATASOURCE_SCAN, PhysicalOperatorTag.BTREE_SEARCH,
+ PhysicalOperatorTag.EXTERNAL_GROUP_BY, PhysicalOperatorTag.HASH_GROUP_BY,
+ PhysicalOperatorTag.HDFS_READER, PhysicalOperatorTag.HYBRID_HASH_JOIN,
+ PhysicalOperatorTag.IN_MEMORY_HASH_JOIN, PhysicalOperatorTag.NESTED_LOOP,
+ PhysicalOperatorTag.PRE_SORTED_DISTINCT_BY, PhysicalOperatorTag.PRE_CLUSTERED_GROUP_BY,
+ PhysicalOperatorTag.REPLICATE, PhysicalOperatorTag.STABLE_SORT, PhysicalOperatorTag.UNION_ALL };
public static PhysicalOperatorTag[] hyraxOperatorsBelowWhichJobGenIsDisabled = new PhysicalOperatorTag[] {};
public static boolean isHyracksOp(PhysicalOperatorTag opTag) {
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
index 10e3432..e782e4f 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatBuilder.java
@@ -79,7 +79,7 @@
public class Node {
private final StringValue nodeId;
- private HashMap<String,AttributeValue> attributes = new HashMap<>();
+ private HashMap<String, AttributeValue> attributes = new HashMap<>();
// no instantiation
private Node(StringValue nodeId, StringValue nodeLabel) {
@@ -142,7 +142,7 @@
public class Edge {
private final Node source;
private final Node destination;
- private final HashMap<String,AttributeValue> attributes = new HashMap<>();
+ private final HashMap<String, AttributeValue> attributes = new HashMap<>();
// no instantiation
private Edge(Node source, Node destination) {
@@ -214,7 +214,7 @@
public static final class StringValue extends AttributeValue {
// no instantiation
- private StringValue (String value) {
+ private StringValue(String value) {
super(value);
}
@@ -224,7 +224,7 @@
newValue = "";
}
newValue = newValue.replace("\n", "\\n");
- return new StringValue("\"" + newValue.replace("\"","\'").trim() + "\"");
+ return new StringValue("\"" + newValue.replace("\"", "\'").trim() + "\"");
}
}
@@ -233,7 +233,7 @@
public static final Color SKYBLUE = new Color("skyblue");
// no instantiation
- private Color (String color) {
+ private Color(String color) {
super(color);
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatGenerator.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatGenerator.java
index 1ea2d19..8ada0ac 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatGenerator.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/DotFormatGenerator.java
@@ -56,8 +56,7 @@
* @return DOT format
*/
public static String generate(final JobActivityGraph jobActivityGraph) {
- final DotFormatBuilder graphBuilder =
- new DotFormatBuilder(DotFormatBuilder.StringValue.of("JobActivityGraph"));
+ final DotFormatBuilder graphBuilder = new DotFormatBuilder(DotFormatBuilder.StringValue.of("JobActivityGraph"));
List<IConnectorDescriptor> connectors;
IActivity activity;
ActivityId fromActivityId;
@@ -154,11 +153,10 @@
* @return DOT format
*/
public static String generate(final JobSpecification jobSpecification) {
- final DotFormatBuilder graphBuilder =
- new DotFormatBuilder(DotFormatBuilder.StringValue.of("JobSpecification"));
+ final DotFormatBuilder graphBuilder = new DotFormatBuilder(DotFormatBuilder.StringValue.of("JobSpecification"));
final Map<ConnectorDescriptorId, IConnectorDescriptor> connectorMap = jobSpecification.getConnectorMap();
- final Map<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>>
- cOp = jobSpecification.getConnectorOperatorMap();
+ final Map<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> cOp =
+ jobSpecification.getConnectorOperatorMap();
ConnectorDescriptorId connectorId;
IConnectorDescriptor connector;
IOperatorDescriptor leftOperator;
@@ -168,24 +166,24 @@
String source;
String destination;
String edgeLabel;
- for (Map.Entry<ConnectorDescriptorId,
- Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> entry : cOp.entrySet()) {
+ for (Map.Entry<ConnectorDescriptorId, Pair<Pair<IOperatorDescriptor, Integer>, Pair<IOperatorDescriptor, Integer>>> entry : cOp
+ .entrySet()) {
connectorId = entry.getKey();
connector = connectorMap.get(connectorId);
edgeLabel = connector.getClass().getName().substring(connector.getClass().getName().lastIndexOf(".") + 1);
edgeLabel += "-" + connectorId;
leftOperator = entry.getValue().getLeft().getLeft();
rightOperator = entry.getValue().getRight().getLeft();
- source = leftOperator.getClass().getName().substring(
- leftOperator.getClass().getName().lastIndexOf(".") + 1);
- sourceNode = graphBuilder.createNode(
- DotFormatBuilder.StringValue.of(leftOperator.getOperatorId().toString()),
- DotFormatBuilder.StringValue.of(leftOperator.toString() + "-" + source));
- destination = rightOperator.getClass().getName().substring(
- rightOperator.getClass().getName().lastIndexOf(".") + 1);
- destinationNode = graphBuilder.createNode(
- DotFormatBuilder.StringValue.of(rightOperator.getOperatorId().toString()),
- DotFormatBuilder.StringValue.of(rightOperator.toString() + "-" + destination));
+ source = leftOperator.getClass().getName()
+ .substring(leftOperator.getClass().getName().lastIndexOf(".") + 1);
+ sourceNode =
+ graphBuilder.createNode(DotFormatBuilder.StringValue.of(leftOperator.getOperatorId().toString()),
+ DotFormatBuilder.StringValue.of(leftOperator.toString() + "-" + source));
+ destination = rightOperator.getClass().getName()
+ .substring(rightOperator.getClass().getName().lastIndexOf(".") + 1);
+ destinationNode =
+ graphBuilder.createNode(DotFormatBuilder.StringValue.of(rightOperator.getOperatorId().toString()),
+ DotFormatBuilder.StringValue.of(rightOperator.toString() + "-" + destination));
graphBuilder.createEdge(sourceNode, destinationNode).setLabel(DotFormatBuilder.StringValue.of(edgeLabel));
}
@@ -209,17 +207,16 @@
}
public static void generateNode(DotFormatBuilder dotBuilder, ILogicalOperator op,
- LogicalOperatorDotVisitor dotVisitor, Set<ILogicalOperator> operatorsVisited)
- throws AlgebricksException {
+ LogicalOperatorDotVisitor dotVisitor, Set<ILogicalOperator> operatorsVisited) throws AlgebricksException {
DotFormatBuilder.StringValue destinationNodeLabel = formatStringOf(op, dotVisitor);
- DotFormatBuilder.Node destinationNode = dotBuilder.createNode(DotFormatBuilder.StringValue.of(
- Integer.toString(op.hashCode())), destinationNodeLabel);
+ DotFormatBuilder.Node destinationNode = dotBuilder
+ .createNode(DotFormatBuilder.StringValue.of(Integer.toString(op.hashCode())), destinationNodeLabel);
DotFormatBuilder.StringValue sourceNodeLabel;
DotFormatBuilder.Node sourceNode;
for (Mutable<ILogicalOperator> child : op.getInputs()) {
sourceNodeLabel = formatStringOf(child.getValue(), dotVisitor);
- sourceNode = dotBuilder.createNode(DotFormatBuilder.StringValue.of(
- Integer.toString(child.getValue().hashCode())), sourceNodeLabel);
+ sourceNode = dotBuilder.createNode(
+ DotFormatBuilder.StringValue.of(Integer.toString(child.getValue().hashCode())), sourceNodeLabel);
dotBuilder.createEdge(sourceNode, destinationNode);
if (!operatorsVisited.contains(child.getValue())) {
generateNode(dotBuilder, child.getValue(), dotVisitor, operatorsVisited);
@@ -230,10 +227,9 @@
for (ILogicalPlan nestedPlan : ((AbstractOperatorWithNestedPlans) op).getNestedPlans()) {
nestedOperator = nestedPlan.getRoots().get(0).getValue();
sourceNodeLabel = formatStringOf(nestedOperator, dotVisitor);
- sourceNode = dotBuilder.createNode(DotFormatBuilder.StringValue.of(
- Integer.toString(nestedOperator.hashCode())), sourceNodeLabel);
- dotBuilder.createEdge(sourceNode, destinationNode).
- setLabel(DotFormatBuilder.StringValue.of("subplan"));
+ sourceNode = dotBuilder.createNode(
+ DotFormatBuilder.StringValue.of(Integer.toString(nestedOperator.hashCode())), sourceNodeLabel);
+ dotBuilder.createEdge(sourceNode, destinationNode).setLabel(DotFormatBuilder.StringValue.of("subplan"));
if (!operatorsVisited.contains(nestedOperator)) {
generateNode(dotBuilder, nestedOperator, dotVisitor, operatorsVisited);
}
@@ -251,8 +247,9 @@
for (int i = 0; i < replicateOperator.getOutputs().size(); i++) {
replicateOutput = replicateOperator.getOutputs().get(i).getValue();
destinationNodeLabel = formatStringOf(replicateOutput, dotVisitor);
- destinationNode = dotBuilder.createNode(DotFormatBuilder.StringValue.of(
- Integer.toString(replicateOutput.hashCode())), destinationNodeLabel);
+ destinationNode = dotBuilder.createNode(
+ DotFormatBuilder.StringValue.of(Integer.toString(replicateOutput.hashCode())),
+ destinationNodeLabel);
if (replicateOperator.getOutputMaterializationFlags()[i]) {
dotBuilder.createEdge(sourceNode, destinationNode).setColor(DotFormatBuilder.Color.RED);
} else {
@@ -267,7 +264,7 @@
private static DotFormatBuilder.StringValue formatStringOf(ILogicalOperator operator,
LogicalOperatorDotVisitor dotVisitor) throws AlgebricksException {
String formattedString = operator.accept(dotVisitor, null).trim();
- IPhysicalOperator physicalOperator = ((AbstractLogicalOperator)operator).getPhysicalOperator();
+ IPhysicalOperator physicalOperator = ((AbstractLogicalOperator) operator).getPhysicalOperator();
if (physicalOperator != null) {
formattedString += "\\n" + physicalOperator.toString().trim() + " |" + operator.getExecutionMode() + "|";
} else {
diff --git a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/LogicalOperatorDotVisitor.java b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/LogicalOperatorDotVisitor.java
index a54ff63..4649d6d 100644
--- a/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/LogicalOperatorDotVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-core/src/main/java/org/apache/hyracks/algebricks/core/utils/LogicalOperatorDotVisitor.java
@@ -350,7 +350,7 @@
@Override
public String visitScriptOperator(ScriptOperator op, Void noArgs) throws AlgebricksException {
stringBuilder.setLength(0);
- stringBuilder.append("script (in: ").append(op.getInputVariables()).append(") (out: " )
+ stringBuilder.append("script (in: ").append(op.getInputVariables()).append(") (out: ")
.append(op.getOutputVariables()).append(")");
return stringBuilder.toString();
}
diff --git a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java b/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
index 0992489..93dd3d5 100644
--- a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
+++ b/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/IBinaryHashFunctionFamilyProvider.java
@@ -24,6 +24,5 @@
public interface IBinaryHashFunctionFamilyProvider {
- public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type)
- throws AlgebricksException;
+ public IBinaryHashFunctionFamily getBinaryHashFunctionFamily(Object type) throws AlgebricksException;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/utils/WriteValueTools.java b/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/utils/WriteValueTools.java
index 97e7d95..ba27c4e 100644
--- a/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/utils/WriteValueTools.java
+++ b/hyracks-fullstack/algebricks/algebricks-data/src/main/java/org/apache/hyracks/algebricks/data/utils/WriteValueTools.java
@@ -26,10 +26,10 @@
public final class WriteValueTools {
- private final static int[] INT_INTERVALS = { 9, 99, 999, 9999, 99999, 999999, 9999999, 99999999, 999999999,
- Integer.MAX_VALUE };
- private final static int[] INT_DIVIDERS = { 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000,
- 1000000000 };
+ private final static int[] INT_INTERVALS =
+ { 9, 99, 999, 9999, 99999, 999999, 9999999, 99999999, 999999999, Integer.MAX_VALUE };
+ private final static int[] INT_DIVIDERS =
+ { 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000 };
private final static int[] DIGITS = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' };
public static void writeInt(int i, OutputStream os) throws IOException {
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java
index 2870074..3f61cc0 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceCombinerRule.java
@@ -65,9 +65,8 @@
}
}
- protected Pair<Boolean, Mutable<ILogicalOperator>> tryToPushAgg(AggregateOperator initAgg,
- GroupByOperator newGbyOp, Set<SimilarAggregatesInfo> toReplaceSet, IOptimizationContext context)
- throws AlgebricksException {
+ protected Pair<Boolean, Mutable<ILogicalOperator>> tryToPushAgg(AggregateOperator initAgg, GroupByOperator newGbyOp,
+ Set<SimilarAggregatesInfo> toReplaceSet, IOptimizationContext context) throws AlgebricksException {
List<LogicalVariable> initVars = initAgg.getVariables();
List<Mutable<ILogicalExpression>> initExprs = initAgg.getExpressions();
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceGroupByCombinerRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceGroupByCombinerRule.java
index ed4196b..a921301 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceGroupByCombinerRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/AbstractIntroduceGroupByCombinerRule.java
@@ -212,7 +212,7 @@
private Pair<Boolean, ILogicalPlan> tryToPushSubplan(ILogicalPlan nestedPlan, GroupByOperator oldGbyOp,
GroupByOperator newGbyOp, BookkeepingInfo bi, List<LogicalVariable> gbyVars, IOptimizationContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
List<Mutable<ILogicalOperator>> pushedRoots = new ArrayList<Mutable<ILogicalOperator>>();
Set<SimilarAggregatesInfo> toReplaceSet = new HashSet<SimilarAggregatesInfo>();
for (Mutable<ILogicalOperator> r : nestedPlan.getRoots()) {
@@ -272,7 +272,7 @@
private boolean tryToPushRoot(Mutable<ILogicalOperator> root, GroupByOperator oldGbyOp, GroupByOperator newGbyOp,
BookkeepingInfo bi, List<LogicalVariable> gbyVars, IOptimizationContext context,
List<Mutable<ILogicalOperator>> toPushAccumulate, Set<SimilarAggregatesInfo> toReplaceSet)
- throws AlgebricksException {
+ throws AlgebricksException {
AbstractLogicalOperator op1 = (AbstractLogicalOperator) root.getValue();
if (op1.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
return false;
@@ -399,8 +399,7 @@
* @return the bottom-most reference of a select operator
*/
private Mutable<ILogicalOperator> findBottomOpRefStayInOldGby(GroupByOperator nestedGby,
- Mutable<ILogicalOperator> currentOpRef)
- throws AlgebricksException {
+ Mutable<ILogicalOperator> currentOpRef) throws AlgebricksException {
Set<LogicalVariable> usedVarsInNestedGby = new HashSet<>();
// Collects used variables in nested pipelines.
for (ILogicalPlan nestedPlan : nestedGby.getNestedPlans()) {
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/BreakSelectIntoConjunctsRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/BreakSelectIntoConjunctsRule.java
index f5bec22..d975cce 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/BreakSelectIntoConjunctsRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/BreakSelectIntoConjunctsRule.java
@@ -43,7 +43,8 @@
}
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
AbstractLogicalOperator op = (AbstractLogicalOperator) opRef.getValue();
if (op.getOperatorTag() != LogicalOperatorTag.SELECT) {
return false;
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
index 2ab8520..fa35a98 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ComplexUnnestToProductRule.java
@@ -115,8 +115,8 @@
outerRoot = buildOperatorChain(outerOps, null, context);
context.computeAndSetTypeEnvironmentForOperator(outerRoot);
- InnerJoinOperator product = new InnerJoinOperator(
- new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
+ InnerJoinOperator product =
+ new InnerJoinOperator(new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
// Outer branch.
product.getInputs().add(new MutableObject<ILogicalOperator>(outerRoot));
// Inner branch.
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/CopyLimitDownRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/CopyLimitDownRule.java
index 372af26..53548e4 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/CopyLimitDownRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/CopyLimitDownRule.java
@@ -91,8 +91,8 @@
// Need to add an offset to the given limit value
// since the original topmost limit will use the offset value.
// We can't apply the offset multiple times.
- IFunctionInfo finfoAdd = context.getMetadataProvider()
- .lookupFunction(AlgebricksBuiltinFunctions.NUMERIC_ADD);
+ IFunctionInfo finfoAdd =
+ context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NUMERIC_ADD);
List<Mutable<ILogicalExpression>> addArgs = new ArrayList<>();
addArgs.add(
new MutableObject<ILogicalExpression>(limitOp.getMaxObjects().getValue().cloneExpression()));
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceOrderByAfterSubplan.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceOrderByAfterSubplan.java
index fcec50a..baad59b 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceOrderByAfterSubplan.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceOrderByAfterSubplan.java
@@ -64,7 +64,8 @@
}
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -108,7 +109,7 @@
foundTarget = false;
break;
}
- if(child.getOperatorTag() == LogicalOperatorTag.GROUP){
+ if (child.getOperatorTag() == LogicalOperatorTag.GROUP) {
foundTarget = false;
break;
}
@@ -137,8 +138,8 @@
return false;
}
}
- List<Pair<IOrder, Mutable<ILogicalExpression>>> orderExprs = deepCopyOrderAndExpression(sourceOrderOp
- .getOrderExpressions());
+ List<Pair<IOrder, Mutable<ILogicalExpression>>> orderExprs =
+ deepCopyOrderAndExpression(sourceOrderOp.getOrderExpressions());
OrderOperator newOrderOp = new OrderOperator(orderExprs);
context.addToDontApplySet(this, newOrderOp);
inputs.set(i, new MutableObject<ILogicalOperator>(newOrderOp));
@@ -155,15 +156,17 @@
}
private Mutable<ILogicalExpression> deepCopyExpressionRef(Mutable<ILogicalExpression> oldExpr) {
- return new MutableObject<ILogicalExpression>(((AbstractLogicalExpression) oldExpr.getValue()).cloneExpression());
+ return new MutableObject<ILogicalExpression>(
+ ((AbstractLogicalExpression) oldExpr.getValue()).cloneExpression());
}
private List<Pair<IOrder, Mutable<ILogicalExpression>>> deepCopyOrderAndExpression(
List<Pair<IOrder, Mutable<ILogicalExpression>>> ordersAndExprs) {
- List<Pair<IOrder, Mutable<ILogicalExpression>>> newOrdersAndExprs = new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>();
+ List<Pair<IOrder, Mutable<ILogicalExpression>>> newOrdersAndExprs =
+ new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>();
for (Pair<IOrder, Mutable<ILogicalExpression>> pair : ordersAndExprs)
- newOrdersAndExprs.add(new Pair<IOrder, Mutable<ILogicalExpression>>(pair.first,
- deepCopyExpressionRef(pair.second)));
+ newOrdersAndExprs
+ .add(new Pair<IOrder, Mutable<ILogicalExpression>>(pair.first, deepCopyExpressionRef(pair.second)));
return newOrdersAndExprs;
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java
index 6763e2b..da0466e 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/EnforceStructuralPropertiesRule.java
@@ -149,8 +149,8 @@
}
AbstractLogicalOperator op = (AbstractLogicalOperator) root.getValue();
op.computeDeliveredPhysicalProperties(context);
- AlgebricksConfig.ALGEBRICKS_LOGGER.trace(">>>> Structural properties for " + op.getPhysicalOperator()
- + ": " + op.getDeliveredPhysicalProperties() + "\n");
+ AlgebricksConfig.ALGEBRICKS_LOGGER.trace(">>>> Structural properties for " + op.getPhysicalOperator() + ": "
+ + op.getDeliveredPhysicalProperties() + "\n");
}
return changed;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
index f51c9ea..b95d6e4 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonExpressionsRule.java
@@ -89,7 +89,8 @@
private final List<ILogicalExpression> originalAssignExprs = new ArrayList<ILogicalExpression>();
private final CommonExpressionSubstitutionVisitor substVisitor = new CommonExpressionSubstitutionVisitor();
- private final Map<ILogicalExpression, ExprEquivalenceClass> exprEqClassMap = new HashMap<ILogicalExpression, ExprEquivalenceClass>();
+ private final Map<ILogicalExpression, ExprEquivalenceClass> exprEqClassMap =
+ new HashMap<ILogicalExpression, ExprEquivalenceClass>();
// Set of operators for which common subexpression elimination should not be performed.
private static final Set<LogicalOperatorTag> ignoreOps = new HashSet<LogicalOperatorTag>(6);
@@ -310,8 +311,8 @@
return false;
}
// Place a Select operator beneath op that contains the enclosing expression.
- SelectOperator selectOp = new SelectOperator(new MutableObject<ILogicalExpression>(enclosingExpr),
- false, null);
+ SelectOperator selectOp =
+ new SelectOperator(new MutableObject<ILogicalExpression>(enclosingExpr), false, null);
selectOp.getInputs().add(new MutableObject<ILogicalOperator>(op.getInputs().get(0).getValue()));
op.getInputs().get(0).setValue(selectOp);
// Set firstOp to be the select below op, since we want to assign the common subexpr there.
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonOperatorsRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonOperatorsRule.java
index 5a4cacd..923ffb5 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonOperatorsRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractCommonOperatorsRule.java
@@ -52,8 +52,8 @@
public class ExtractCommonOperatorsRule implements IAlgebraicRewriteRule {
- private final HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> childrenToParents
- = new HashMap<>();
+ private final HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> childrenToParents =
+ new HashMap<>();
private final List<Mutable<ILogicalOperator>> roots = new ArrayList<>();
private final List<List<Mutable<ILogicalOperator>>> equivalenceClasses = new ArrayList<>();
private final HashMap<Mutable<ILogicalOperator>, BitSet> opToCandidateInputs = new HashMap<>();
@@ -210,7 +210,8 @@
continue;
}
ArrayList<LogicalVariable> liveVars = new ArrayList<LogicalVariable>();
- Map<LogicalVariable, LogicalVariable> variableMappingBack = new HashMap<LogicalVariable, LogicalVariable>();
+ Map<LogicalVariable, LogicalVariable> variableMappingBack =
+ new HashMap<LogicalVariable, LogicalVariable>();
IsomorphismUtilities.mapVariablesTopDown(ref.getValue(), candidate.getValue(), variableMappingBack);
for (int i = 0; i < liveVarsNew.size(); i++) {
liveVars.add(variableMappingBack.get(liveVarsNew.get(i)));
@@ -240,8 +241,8 @@
for (Mutable<ILogicalOperator> parentOpRef : parentOpList) {
AbstractLogicalOperator parentOp = (AbstractLogicalOperator) parentOpRef.getValue();
int index = parentOp.getInputs().indexOf(ref);
- ILogicalOperator childOp = parentOp.getOperatorTag() == LogicalOperatorTag.PROJECT ? assignOperator
- : projectOperator;
+ ILogicalOperator childOp =
+ parentOp.getOperatorTag() == LogicalOperatorTag.PROJECT ? assignOperator : projectOperator;
if (!HeuristicOptimizer.isHyracksOp(parentOp.getPhysicalOperator().getOperatorTag())) {
parentOp.getInputs().set(index, new MutableObject<ILogicalOperator>(childOp));
} else {
@@ -263,7 +264,8 @@
}
private void genCandidates(IOptimizationContext context) throws AlgebricksException {
- List<List<Mutable<ILogicalOperator>>> previousEquivalenceClasses = new ArrayList<List<Mutable<ILogicalOperator>>>();
+ List<List<Mutable<ILogicalOperator>>> previousEquivalenceClasses =
+ new ArrayList<List<Mutable<ILogicalOperator>>>();
while (equivalenceClasses.size() > 0) {
previousEquivalenceClasses.clear();
for (List<Mutable<ILogicalOperator>> candidates : equivalenceClasses) {
@@ -364,7 +366,8 @@
}
private void prune(IOptimizationContext context) throws AlgebricksException {
- List<List<Mutable<ILogicalOperator>>> previousEquivalenceClasses = new ArrayList<List<Mutable<ILogicalOperator>>>();
+ List<List<Mutable<ILogicalOperator>>> previousEquivalenceClasses =
+ new ArrayList<List<Mutable<ILogicalOperator>>>();
for (List<Mutable<ILogicalOperator>> candidates : equivalenceClasses) {
List<Mutable<ILogicalOperator>> candidatesCopy = new ArrayList<Mutable<ILogicalOperator>>();
candidatesCopy.addAll(candidates);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractFunctionsFromJoinConditionRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractFunctionsFromJoinConditionRule.java
index 71a00bf..198510a 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractFunctionsFromJoinConditionRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractFunctionsFromJoinConditionRule.java
@@ -63,7 +63,8 @@
public class ExtractFunctionsFromJoinConditionRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -104,8 +105,8 @@
for (Mutable<ILogicalExpression> exprRef : fexp.getArguments()) {
if (exprRef.getValue().getExpressionTag() == LogicalExpressionTag.FUNCTION_CALL) {
LogicalVariable newVar = context.newVar();
- AssignOperator newAssign = new AssignOperator(newVar, new MutableObject<ILogicalExpression>(exprRef
- .getValue().cloneExpression()));
+ AssignOperator newAssign = new AssignOperator(newVar,
+ new MutableObject<ILogicalExpression>(exprRef.getValue().cloneExpression()));
newAssign.setExecutionMode(joinOp.getExecutionMode());
// Place assign below joinOp.
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractGroupByDecorVariablesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractGroupByDecorVariablesRule.java
index a2ad732..05cc7b6 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractGroupByDecorVariablesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ExtractGroupByDecorVariablesRule.java
@@ -66,7 +66,7 @@
for (Pair<LogicalVariable, Mutable<ILogicalExpression>> decorVarExpr : decorList) {
Mutable<ILogicalExpression> exprRef = decorVarExpr.second;
ILogicalExpression expr = exprRef.getValue();
- if (expr ==null || expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
+ if (expr == null || expr.getExpressionTag() == LogicalExpressionTag.VARIABLE) {
continue;
}
// Rewrites the decoration entry if the decoration expression is not a variable reference expression.
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/FactorRedundantGroupAndDecorVarsRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/FactorRedundantGroupAndDecorVarsRule.java
index 38776c6..2f28a84 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/FactorRedundantGroupAndDecorVarsRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/FactorRedundantGroupAndDecorVarsRule.java
@@ -43,7 +43,8 @@
public class FactorRedundantGroupAndDecorVarsRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -77,8 +78,8 @@
LogicalVariable lhs = varRhsToLhs.get(v);
if (lhs != null) {
if (p.first != null) {
- AssignOperator assign = new AssignOperator(p.first, new MutableObject<ILogicalExpression>(
- new VariableReferenceExpression(lhs)));
+ AssignOperator assign = new AssignOperator(p.first,
+ new MutableObject<ILogicalExpression>(new VariableReferenceExpression(lhs)));
ILogicalOperator op = opRef.getValue();
assign.getInputs().add(new MutableObject<ILogicalOperator>(op));
opRef.setValue(assign);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InferTypesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InferTypesRule.java
index a61b1a2..8d54a67 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InferTypesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InferTypesRule.java
@@ -28,7 +28,8 @@
public class InferTypesRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java
index 71fde61..2c825b7 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/InlineSingleReferenceVariablesRule.java
@@ -51,7 +51,8 @@
public class InlineSingleReferenceVariablesRule extends InlineVariablesRule {
// Maps from variable to a list of operators using that variable.
- protected Map<LogicalVariable, List<ILogicalOperator>> usedVarsMap = new HashMap<LogicalVariable, List<ILogicalOperator>>();
+ protected Map<LogicalVariable, List<ILogicalOperator>> usedVarsMap =
+ new HashMap<LogicalVariable, List<ILogicalOperator>>();
protected List<LogicalVariable> usedVars = new ArrayList<LogicalVariable>();
@Override
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/IntroJoinInsideSubplanRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/IntroJoinInsideSubplanRule.java
index 641ccfe..081f199 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/IntroJoinInsideSubplanRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/IntroJoinInsideSubplanRule.java
@@ -85,12 +85,12 @@
OperatorPropertiesUtil.getFreeVariablesInSelfOrDesc(op1, free1);
if (!free1.isEmpty()) {
OperatorManipulationUtil.ntsToEts(op2Ref, context);
- NestedTupleSourceOperator nts = new NestedTupleSourceOperator(
- new MutableObject<ILogicalOperator>(subplan));
+ NestedTupleSourceOperator nts =
+ new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(subplan));
Mutable<ILogicalOperator> ntsRef = new MutableObject<ILogicalOperator>(nts);
Mutable<ILogicalOperator> innerRef = new MutableObject<ILogicalOperator>(op2);
- InnerJoinOperator join = new InnerJoinOperator(new MutableObject<ILogicalExpression>(
- ConstantExpression.TRUE), ntsRef, innerRef);
+ InnerJoinOperator join = new InnerJoinOperator(
+ new MutableObject<ILogicalExpression>(ConstantExpression.TRUE), ntsRef, innerRef);
op2Ref.setValue(join);
context.computeAndSetTypeEnvironmentForOperator(nts);
context.computeAndSetTypeEnvironmentForOperator(join);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java
index da85e0d..a5cc573 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PullSelectOutOfEqJoin.java
@@ -43,7 +43,8 @@
public class PullSelectOutOfEqJoin implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java
index bbb01dd..43c58e2 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignBelowUnionAllRule.java
@@ -110,8 +110,8 @@
for (int j = 0; j < assignVars.size(); j++) {
LogicalVariable first = newAssignOps[0].getVariables().get(j);
LogicalVariable second = newAssignOps[1].getVariables().get(j);
- Triple<LogicalVariable, LogicalVariable, LogicalVariable> varMapping = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(
- first, second, assignVars.get(j));
+ Triple<LogicalVariable, LogicalVariable, LogicalVariable> varMapping =
+ new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(first, second, assignVars.get(j));
unionOp.getVariableMappings().add(varMapping);
}
context.computeAndSetTypeEnvironmentForOperator(unionOp);
@@ -127,7 +127,7 @@
private AssignOperator createAssignBelowUnionAllBranch(UnionAllOperator unionOp, int inputIndex,
AssignOperator originalAssignOp, Set<LogicalVariable> assignUsedVars, IOptimizationContext context)
- throws AlgebricksException {
+ throws AlgebricksException {
AssignOperator newAssignOp = cloneAssignOperator(originalAssignOp, context);
newAssignOp.getInputs()
.add(new MutableObject<ILogicalOperator>(unionOp.getInputs().get(inputIndex).getValue()));
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignDownThroughProductRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignDownThroughProductRule.java
index f3f0e02..edd7e23 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignDownThroughProductRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushAssignDownThroughProductRule.java
@@ -37,7 +37,8 @@
public class PushAssignDownThroughProductRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java
index c04a9d5..d7090d2 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushFunctionsBelowJoin.java
@@ -84,7 +84,8 @@
}
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushGroupByIntoSortRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushGroupByIntoSortRule.java
index 7ea1327..192e318 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushGroupByIntoSortRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushGroupByIntoSortRule.java
@@ -46,7 +46,8 @@
public class PushGroupByIntoSortRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
@@ -67,8 +68,8 @@
Mutable<ILogicalOperator> op2Ref = op.getInputs().get(0).getValue().getInputs().get(0);
AbstractLogicalOperator op2 = (AbstractLogicalOperator) op2Ref.getValue();
if (op2.getPhysicalOperator().getOperatorTag() == PhysicalOperatorTag.STABLE_SORT) {
- AbstractStableSortPOperator sortPhysicalOperator = (AbstractStableSortPOperator) op2
- .getPhysicalOperator();
+ AbstractStableSortPOperator sortPhysicalOperator =
+ (AbstractStableSortPOperator) op2.getPhysicalOperator();
if (groupByOperator.getNestedPlans().size() != 1) {
//Sort group-by currently works only for one nested plan with one root containing
//an aggregate and a nested-tuple-source.
@@ -88,13 +89,14 @@
continue;
}
AggregateOperator aggOp = (AggregateOperator) r0.getValue();
- AbstractLogicalOperator aggInputOp = (AbstractLogicalOperator) aggOp.getInputs().get(0)
- .getValue();
+ AbstractLogicalOperator aggInputOp =
+ (AbstractLogicalOperator) aggOp.getInputs().get(0).getValue();
if (aggInputOp.getOperatorTag() != LogicalOperatorTag.NESTEDTUPLESOURCE) {
continue;
}
- boolean hasIntermediateAggregate = generateMergeAggregationExpressions(groupByOperator, context);
+ boolean hasIntermediateAggregate =
+ generateMergeAggregationExpressions(groupByOperator, context);
if (!hasIntermediateAggregate) {
continue;
}
@@ -132,8 +134,8 @@
"External/sort group-by currently works only for one nested plan with one root containing"
+ "an aggregate and a nested-tuple-source.");
}
- IMergeAggregationExpressionFactory mergeAggregationExpressionFactory = context
- .getMergeAggregationExpressionFactory();
+ IMergeAggregationExpressionFactory mergeAggregationExpressionFactory =
+ context.getMergeAggregationExpressionFactory();
Mutable<ILogicalOperator> r0 = p0.getRoots().get(0);
AggregateOperator aggOp = (AggregateOperator) r0.getValue();
List<Mutable<ILogicalExpression>> aggFuncRefs = aggOp.getExpressions();
@@ -141,8 +143,8 @@
int n = aggOp.getExpressions().size();
List<Mutable<ILogicalExpression>> mergeExpressionRefs = new ArrayList<Mutable<ILogicalExpression>>();
for (int i = 0; i < n; i++) {
- ILogicalExpression mergeExpr = mergeAggregationExpressionFactory.createMergeAggregation(
- originalAggVars.get(i), aggFuncRefs.get(i).getValue(), context);
+ ILogicalExpression mergeExpr = mergeAggregationExpressionFactory
+ .createMergeAggregation(originalAggVars.get(i), aggFuncRefs.get(i).getValue(), context);
if (mergeExpr == null) {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushMapOperatorDownThroughProductRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushMapOperatorDownThroughProductRule.java
index f71af5a..3181459 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushMapOperatorDownThroughProductRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushMapOperatorDownThroughProductRule.java
@@ -53,7 +53,7 @@
if (!OperatorPropertiesUtil.isMovable(op1)) {
return false;
- };
+ } ;
Mutable<ILogicalOperator> op2Ref = op1.getInputs().get(0);
AbstractLogicalOperator op2 = (AbstractLogicalOperator) op2Ref.getValue();
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushNestedOrderByUnderPreSortedGroupByRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushNestedOrderByUnderPreSortedGroupByRule.java
index fc7b98e..aa58985 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushNestedOrderByUnderPreSortedGroupByRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushNestedOrderByUnderPreSortedGroupByRule.java
@@ -45,7 +45,8 @@
public class PushNestedOrderByUnderPreSortedGroupByRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushProjectIntoDataSourceScanRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushProjectIntoDataSourceScanRule.java
index e05619f..281093a 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushProjectIntoDataSourceScanRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushProjectIntoDataSourceScanRule.java
@@ -32,7 +32,8 @@
public class PushProjectIntoDataSourceScanRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushSubplanWithAggregateDownThroughProductRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushSubplanWithAggregateDownThroughProductRule.java
index 54c5728..d135846 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushSubplanWithAggregateDownThroughProductRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushSubplanWithAggregateDownThroughProductRule.java
@@ -41,7 +41,8 @@
public class PushSubplanWithAggregateDownThroughProductRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughProductRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughProductRule.java
index 2da1343..4c2c4da 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughProductRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughProductRule.java
@@ -37,7 +37,8 @@
public class PushUnnestDownThroughProductRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughUnionRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughUnionRule.java
index b6556e9..3ef37cd 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughUnionRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/PushUnnestDownThroughUnionRule.java
@@ -79,11 +79,11 @@
}
LogicalVariable unnestVar1 = context.newVar();
- UnnestOperator unnest1 = new UnnestOperator(unnestVar1, new MutableObject<ILogicalExpression>(unnestOpRef
- .getExpressionRef().getValue().cloneExpression()));
+ UnnestOperator unnest1 = new UnnestOperator(unnestVar1,
+ new MutableObject<ILogicalExpression>(unnestOpRef.getExpressionRef().getValue().cloneExpression()));
LogicalVariable unnestVar2 = context.newVar();
- UnnestOperator unnest2 = new UnnestOperator(unnestVar2, new MutableObject<ILogicalExpression>(unnestOpRef
- .getExpressionRef().getValue().cloneExpression()));
+ UnnestOperator unnest2 = new UnnestOperator(unnestVar2,
+ new MutableObject<ILogicalExpression>(unnestOpRef.getExpressionRef().getValue().cloneExpression()));
//Getting the two topmost branched and adding them as an input to the unnests:
Mutable<ILogicalOperator> branch1 = unionAbstractOp.getInputs().get(0);
@@ -110,10 +110,11 @@
context.computeAndSetTypeEnvironmentForOperator(unnest2);
//creating a new union operator with the updated logical variables
- List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap = new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>(
- 1);
- Triple<LogicalVariable, LogicalVariable, LogicalVariable> union_triple_vars = new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(
- unnestVar1, unnestVar2, unnestOpRef.getVariables().get(0));
+ List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> varMap =
+ new ArrayList<Triple<LogicalVariable, LogicalVariable, LogicalVariable>>(1);
+ Triple<LogicalVariable, LogicalVariable, LogicalVariable> union_triple_vars =
+ new Triple<LogicalVariable, LogicalVariable, LogicalVariable>(unnestVar1, unnestVar2,
+ unnestOpRef.getVariables().get(0));
varMap.add(union_triple_vars);
UnionAllOperator unionOpFinal = new UnionAllOperator(varMap);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ReinferAllTypesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ReinferAllTypesRule.java
index b43363a..bf649ab 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ReinferAllTypesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/ReinferAllTypesRule.java
@@ -37,7 +37,8 @@
}
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
if (context.checkIfInDontApplySet(this, opRef.getValue())) {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantProjectionRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantProjectionRule.java
index 38e97d7..fbaab78 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantProjectionRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantProjectionRule.java
@@ -53,7 +53,8 @@
}
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
AbstractLogicalOperator op1 = (AbstractLogicalOperator) opRef.getValue();
if (op1.getOperatorTag() == LogicalOperatorTag.PROJECT) {
Mutable<ILogicalOperator> opRef2 = op1.getInputs().get(0);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
index 2f0913b..5386193 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveRedundantVariablesRule.java
@@ -69,7 +69,8 @@
public class RemoveRedundantVariablesRule implements IAlgebraicRewriteRule {
private final VariableSubstitutionVisitor substVisitor = new VariableSubstitutionVisitor();
- private final Map<LogicalVariable, List<LogicalVariable>> equivalentVarsMap = new HashMap<LogicalVariable, List<LogicalVariable>>();
+ private final Map<LogicalVariable, List<LogicalVariable>> equivalentVarsMap =
+ new HashMap<LogicalVariable, List<LogicalVariable>>();
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnnecessarySortMergeExchange.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnnecessarySortMergeExchange.java
index 84d7c9d..e42c067 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnnecessarySortMergeExchange.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnnecessarySortMergeExchange.java
@@ -110,8 +110,8 @@
for (OrderColumn oc : sme.getSortColumns()) {
ocList.add(oc);
}
- HashPartitionMergeExchangePOperator hpme = new HashPartitionMergeExchangePOperator(ocList,
- hpe.getHashFields(), hpe.getDomain());
+ HashPartitionMergeExchangePOperator hpme =
+ new HashPartitionMergeExchangePOperator(ocList, hpe.getHashFields(), hpe.getDomain());
op1.setPhysicalOperator(hpme);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
index 5bf4e6881..e197814 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/RemoveUnusedAssignAndAggregateRule.java
@@ -126,8 +126,8 @@
// since we are sure that the output of UNIONALL operator is used
// afterwards.
if (opRef.getValue().getOperatorTag() == LogicalOperatorTag.UNIONALL) {
- Iterator<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> iter = ((UnionAllOperator) opRef
- .getValue()).getVariableMappings().iterator();
+ Iterator<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> iter =
+ ((UnionAllOperator) opRef.getValue()).getVariableMappings().iterator();
while (iter.hasNext()) {
Triple<LogicalVariable, LogicalVariable, LogicalVariable> varMapping = iter.next();
survivedUnionSourceVarSet.add(varMapping.first);
@@ -240,8 +240,8 @@
}
private boolean removeUnusedVarsFromUnionAll(UnionAllOperator unionOp, Set<LogicalVariable> toRemove) {
- Iterator<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> iter = unionOp.getVariableMappings()
- .iterator();
+ Iterator<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> iter =
+ unionOp.getVariableMappings().iterator();
boolean modified = false;
if (toRemove != null && !toRemove.isEmpty()) {
while (iter.hasNext()) {
@@ -361,8 +361,8 @@
} else {
// A decor var mapping can have a variable reference expression without a new variable
// definition, which is for rebinding the referred variable.
- VariableReferenceExpression varExpr = (VariableReferenceExpression) decorMapping.second
- .getValue();
+ VariableReferenceExpression varExpr =
+ (VariableReferenceExpression) decorMapping.second.getValue();
LogicalVariable reboundDecorVar = varExpr.getVariableReference();
assignVarsSetInThisOp.add(reboundDecorVar);
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
index 84961d6..5b6285a 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/SetAlgebricksPhysicalOperatorsRule.java
@@ -172,8 +172,7 @@
boolean hasIntermediateAgg = generateMergeAggregationExpressions(gby, context);
if (hasIntermediateAgg) {
ExternalGroupByPOperator externalGby = new ExternalGroupByPOperator(
- gby.getGroupByList(),
- physicalOptimizationConfig.getMaxFramesForGroupBy(),
+ gby.getGroupByList(), physicalOptimizationConfig.getMaxFramesForGroupBy(),
(long) physicalOptimizationConfig.getMaxFramesForGroupBy()
* physicalOptimizationConfig.getFrameSize());
op.setPhysicalOperator(externalGby);
@@ -354,8 +353,9 @@
prevSecondaryKeys = new ArrayList<LogicalVariable>();
getKeys(opInsDel.getPrevSecondaryKeyExprs(), prevSecondaryKeys);
if (opInsDel.getPrevAdditionalFilteringExpression() != null) {
- prevAdditionalFilteringKey = ((VariableReferenceExpression) (opInsDel
- .getPrevAdditionalFilteringExpression()).getValue()).getVariableReference();
+ prevAdditionalFilteringKey =
+ ((VariableReferenceExpression) (opInsDel.getPrevAdditionalFilteringExpression())
+ .getValue()).getVariableReference();
}
}
op.setPhysicalOperator(new IndexInsertDeleteUpsertPOperator(primaryKeys, secondaryKeys,
@@ -441,8 +441,8 @@
"External group-by currently works only for one nested plan with one root containing"
+ "an aggregate and a nested-tuple-source.");
}
- IMergeAggregationExpressionFactory mergeAggregationExpressionFactory = context
- .getMergeAggregationExpressionFactory();
+ IMergeAggregationExpressionFactory mergeAggregationExpressionFactory =
+ context.getMergeAggregationExpressionFactory();
Mutable<ILogicalOperator> r0 = p0.getRoots().get(0);
AbstractLogicalOperator r0Logical = (AbstractLogicalOperator) r0.getValue();
if (r0Logical.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanRule.java
index 32c7e03..4cd15a0 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanRule.java
@@ -74,8 +74,8 @@
private boolean elimOneSubplanWithNoFreeVars(Mutable<ILogicalOperator> opRef) {
SubplanOperator subplan = (SubplanOperator) opRef.getValue();
- AbstractLogicalOperator rootOp = (AbstractLogicalOperator) subplan.getNestedPlans().get(0).getRoots().get(0)
- .getValue();
+ AbstractLogicalOperator rootOp =
+ (AbstractLogicalOperator) subplan.getNestedPlans().get(0).getRoots().get(0).getValue();
if (rootOp.getOperatorTag() == LogicalOperatorTag.EMPTYTUPLESOURCE
|| rootOp.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
opRef.setValue(subplan.getInputs().get(0).getValue());
@@ -115,8 +115,8 @@
if (topOp == null) {
topOp = r.getValue();
} else {
- InnerJoinOperator j = new InnerJoinOperator(
- new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
+ InnerJoinOperator j =
+ new InnerJoinOperator(new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
j.getInputs().add(new MutableObject<ILogicalOperator>(topOp));
j.getInputs().add(r);
ctx.setOutputTypeEnvironment(j, j.computeOutputTypeEnvironment(ctx));
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanWithInputCardinalityOneRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanWithInputCardinalityOneRule.java
index 607ea1f..e2576ba 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanWithInputCardinalityOneRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/EliminateSubplanWithInputCardinalityOneRule.java
@@ -122,8 +122,8 @@
// Replaces all Nts' in the nested plan with the Subplan input operator or its deep copy.
ILogicalOperator topOperator = rootRefs.get(0).getValue();
- ReplaceNtsWithSubplanInputOperatorVisitor visitor = new ReplaceNtsWithSubplanInputOperatorVisitor(context,
- subplan);
+ ReplaceNtsWithSubplanInputOperatorVisitor visitor =
+ new ReplaceNtsWithSubplanInputOperatorVisitor(context, subplan);
ILogicalOperator newTopOperator = topOperator.accept(visitor, null);
currentOpRef.setValue(newTopOperator);
OperatorManipulationUtil.computeTypeEnvironmentBottomUp(newTopOperator, context);
@@ -168,7 +168,7 @@
*/
private void isCardinalityOne(Mutable<ILogicalOperator> opRef, Set<LogicalVariable> freeVars,
Set<LogicalVariable> varsWithCardinalityOne, Set<LogicalVariable> varsLiveAtUnnestAndJoin)
- throws AlgebricksException {
+ throws AlgebricksException {
AbstractLogicalOperator operator = (AbstractLogicalOperator) opRef.getValue();
List<LogicalVariable> liveVars = new ArrayList<>();
VariableUtilities.getLiveVariables(operator, liveVars);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java
index 942f181..c4ea604 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java
@@ -236,8 +236,8 @@
}
if (testForNull == null) {
testForNull = context.newVar();
- AssignOperator tmpAsgn = new AssignOperator(testForNull,
- new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
+ AssignOperator tmpAsgn =
+ new AssignOperator(testForNull, new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
tmpAsgn.getInputs().add(new MutableObject<ILogicalOperator>(rightRef.getValue()));
rightRef.setValue(tmpAsgn);
context.computeAndSetTypeEnvironmentForOperator(tmpAsgn);
@@ -247,10 +247,10 @@
ILogicalExpression isNullTest = new ScalarFunctionCallExpression(finfoEq,
new MutableObject<ILogicalExpression>(new VariableReferenceExpression(testForNull)));
IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
- ScalarFunctionCallExpression nonNullTest = new ScalarFunctionCallExpression(finfoNot,
- new MutableObject<ILogicalExpression>(isNullTest));
- SelectOperator selectNonNull = new SelectOperator(new MutableObject<ILogicalExpression>(nonNullTest), false,
- null);
+ ScalarFunctionCallExpression nonNullTest =
+ new ScalarFunctionCallExpression(finfoNot, new MutableObject<ILogicalExpression>(isNullTest));
+ SelectOperator selectNonNull =
+ new SelectOperator(new MutableObject<ILogicalExpression>(nonNullTest), false, null);
GroupByOperator g = new GroupByOperator();
Mutable<ILogicalOperator> newSubplanRef = new MutableObject<ILogicalOperator>(subplan);
NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(g));
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/MoveFreeVariableOperatorOutOfSubplanRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/MoveFreeVariableOperatorOutOfSubplanRule.java
index fa893d5..94cae74 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/MoveFreeVariableOperatorOutOfSubplanRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/MoveFreeVariableOperatorOutOfSubplanRule.java
@@ -125,7 +125,7 @@
LogicalOperatorTag operatorTag = currentOperator.getOperatorTag();
if (operatorTag == LogicalOperatorTag.AGGREGATE || operatorTag == LogicalOperatorTag.RUNNINGAGGREGATE
|| operatorTag == LogicalOperatorTag.GROUP) {
- return false;
+ return false;
}
if (operatorTag == LogicalOperatorTag.PROJECT) {
Set<LogicalVariable> producedVars = new HashSet<>();
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/NestedSubplanToJoinRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/NestedSubplanToJoinRule.java
index 35c7e4e..d9acf53 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/NestedSubplanToJoinRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/NestedSubplanToJoinRule.java
@@ -109,8 +109,8 @@
**/
Mutable<ILogicalExpression> expr = new MutableObject<ILogicalExpression>(ConstantExpression.TRUE);
Mutable<ILogicalOperator> nestedRootRef = nestedRoots.get(0);
- ILogicalOperator join = new InnerJoinOperator(expr, new MutableObject<ILogicalOperator>(subplanInput),
- nestedRootRef);
+ ILogicalOperator join =
+ new InnerJoinOperator(expr, new MutableObject<ILogicalOperator>(subplanInput), nestedRootRef);
/** rewrite the nested tuple source to be empty tuple source */
rewriteNestedTupleSource(nestedRootRef, context);
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/PushSubplanIntoGroupByRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/PushSubplanIntoGroupByRule.java
index af95ecd..9d3b311 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/PushSubplanIntoGroupByRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/PushSubplanIntoGroupByRule.java
@@ -154,8 +154,8 @@
// Sets the nts for a original subplan.
Mutable<ILogicalOperator> originalGbyRootOpRef = gbyNestedPlan.getRoots().get(rootIndex);
Mutable<ILogicalOperator> originalGbyNtsRef = downToNts(originalGbyRootOpRef);
- NestedTupleSourceOperator originalNts = (NestedTupleSourceOperator) originalGbyNtsRef
- .getValue();
+ NestedTupleSourceOperator originalNts =
+ (NestedTupleSourceOperator) originalGbyNtsRef.getValue();
originalNts.setDataSourceReference(new MutableObject<>(gby));
// Pushes a new subplan if possible.
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/ReplaceNtsWithSubplanInputOperatorVisitor.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/ReplaceNtsWithSubplanInputOperatorVisitor.java
index 080828d..35aa984 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/ReplaceNtsWithSubplanInputOperatorVisitor.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/ReplaceNtsWithSubplanInputOperatorVisitor.java
@@ -141,8 +141,8 @@
isOriginalCopyUsed = true;
return subplanInputOperator;
}
- LogicalOperatorDeepCopyWithNewVariablesVisitor visitor = new LogicalOperatorDeepCopyWithNewVariablesVisitor(ctx,
- ctx);
+ LogicalOperatorDeepCopyWithNewVariablesVisitor visitor =
+ new LogicalOperatorDeepCopyWithNewVariablesVisitor(ctx, ctx);
ILogicalOperator copiedSubplanInputOperator = visitor.deepCopy(subplanInputOperator);
varMap.putAll(visitor.getInputToOutputVariableMapping());
return copiedSubplanInputOperator;
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/SubplanOutOfGroupRule.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/SubplanOutOfGroupRule.java
index 049e853..b9b2cee 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/SubplanOutOfGroupRule.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/SubplanOutOfGroupRule.java
@@ -51,7 +51,8 @@
public class SubplanOutOfGroupRule implements IAlgebraicRewriteRule {
@Override
- public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context) throws AlgebricksException {
+ public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
+ throws AlgebricksException {
return false;
}
diff --git a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/util/PhysicalOptimizationsUtil.java b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/util/PhysicalOptimizationsUtil.java
index 99480bf..15ae32a 100644
--- a/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/util/PhysicalOptimizationsUtil.java
+++ b/hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/util/PhysicalOptimizationsUtil.java
@@ -41,15 +41,16 @@
visitOperatorAndItsDescendants(op, visitor, ctx);
}
- public static <R> void visitOperatorAndItsDescendants(ILogicalOperator op, ILogicalOperatorVisitor<R, IOptimizationContext> visitor,
- IOptimizationContext ctx) throws AlgebricksException {
+ public static <R> void visitOperatorAndItsDescendants(ILogicalOperator op,
+ ILogicalOperatorVisitor<R, IOptimizationContext> visitor, IOptimizationContext ctx)
+ throws AlgebricksException {
Set<ILogicalOperator> visitSet = new HashSet<ILogicalOperator>();
computeFDsAndEqClassesWithVisitorRec(op, ctx, visitor, visitSet);
}
private static <R> void computeFDsAndEqClassesWithVisitorRec(ILogicalOperator op, IOptimizationContext ctx,
ILogicalOperatorVisitor<R, IOptimizationContext> visitor, Set<ILogicalOperator> visitSet)
- throws AlgebricksException {
+ throws AlgebricksException {
visitSet.add(op);
for (Mutable<ILogicalOperator> i : op.getInputs()) {
computeFDsAndEqClassesWithVisitorRec((AbstractLogicalOperator) i.getValue(), ctx, visitor, visitSet);
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/aggreg/SerializableAggregatorDescriptorFactory.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/aggreg/SerializableAggregatorDescriptorFactory.java
index 1e06c76..28590ec 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/aggreg/SerializableAggregatorDescriptorFactory.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/aggreg/SerializableAggregatorDescriptorFactory.java
@@ -93,8 +93,8 @@
int fieldSlotLength = stateAccessor.getFieldSlotsLength();
for (int i = 0; i < aggs.length; i++) {
byte[] data = stateAccessor.getBuffer().array();
- int start = stateAccessor.getFieldStartOffset(stateTupleIndex, i + keys.length)
- + stateTupleStart + fieldSlotLength;
+ int start = stateAccessor.getFieldStartOffset(stateTupleIndex, i + keys.length) + stateTupleStart
+ + fieldSlotLength;
aggs[i].step(ftr, data, start, stateFieldLength[i]);
}
}
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/SubplanRuntimeFactory.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/SubplanRuntimeFactory.java
index f6a349f..7e04750 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/SubplanRuntimeFactory.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/meta/SubplanRuntimeFactory.java
@@ -73,8 +73,8 @@
RecordDescriptor pipelineOutputRecordDescriptor = null;
- final PipelineAssembler pa = new PipelineAssembler(pipeline, 1, 1, inputRecordDesc,
- pipelineOutputRecordDescriptor);
+ final PipelineAssembler pa =
+ new PipelineAssembler(pipeline, 1, 1, inputRecordDesc, pipelineOutputRecordDescriptor);
final IMissingWriter[] nullWriters = new IMissingWriter[missingWriterFactories.length];
for (int i = 0; i < missingWriterFactories.length; i++) {
nullWriters[i] = missingWriterFactories[i].createMissingWriter();
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/sort/InMemorySortRuntimeFactory.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/sort/InMemorySortRuntimeFactory.java
index bb8223d..f251bb7 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/sort/InMemorySortRuntimeFactory.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/sort/InMemorySortRuntimeFactory.java
@@ -44,10 +44,9 @@
public InMemorySortRuntimeFactory(int[] sortFields, INormalizedKeyComputerFactory firstKeyNormalizerFactory,
IBinaryComparatorFactory[] comparatorFactories, int[] projectionList) {
- this(sortFields,
- firstKeyNormalizerFactory != null ? new INormalizedKeyComputerFactory[] { firstKeyNormalizerFactory }
- : null,
- comparatorFactories, projectionList);
+ this(sortFields, firstKeyNormalizerFactory != null
+ ? new INormalizedKeyComputerFactory[] { firstKeyNormalizerFactory } : null, comparatorFactories,
+ projectionList);
}
public InMemorySortRuntimeFactory(int[] sortFields, INormalizedKeyComputerFactory[] keyNormalizerFactories,
diff --git a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/serializer/ResultSerializerFactoryProvider.java b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/serializer/ResultSerializerFactoryProvider.java
index 0e49d22..763e6ff 100644
--- a/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/serializer/ResultSerializerFactoryProvider.java
+++ b/hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/serializer/ResultSerializerFactoryProvider.java
@@ -47,8 +47,8 @@
@Override
public IResultSerializer createResultSerializer(RecordDescriptor inputRecordDesc, PrintStream printStream) {
- final IAWriter writer = writerFactory.createWriter(fields, printStream, printerFactories,
- inputRecordDesc);
+ final IAWriter writer =
+ writerFactory.createWriter(fields, printStream, printerFactories, inputRecordDesc);
return new IResultSerializer() {
private static final long serialVersionUID = 1L;
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/src/main/java/org/apache/hyracks/algebricks/tests/pushruntime/IntegerConstantEvalFactory.java b/hyracks-fullstack/algebricks/algebricks-tests/src/main/java/org/apache/hyracks/algebricks/tests/pushruntime/IntegerConstantEvalFactory.java
index 26790c5..afbff40 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/src/main/java/org/apache/hyracks/algebricks/tests/pushruntime/IntegerConstantEvalFactory.java
+++ b/hyracks-fullstack/algebricks/algebricks-tests/src/main/java/org/apache/hyracks/algebricks/tests/pushruntime/IntegerConstantEvalFactory.java
@@ -48,7 +48,7 @@
private ArrayBackedValueStorage buf = new ArrayBackedValueStorage();
{
- IntegerSerializerDeserializer.INSTANCE.serialize(value, buf.getDataOutput());
+ IntegerSerializerDeserializer.INSTANCE.serialize(value, buf.getDataOutput());
}
@Override
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
index cc4c1b9..40e2ec6 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
+++ b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/pushruntime/PushRuntimeTest.java
@@ -141,9 +141,9 @@
PrinterRuntimeFactory printer = new PrinterRuntimeFactory(new int[] { 0, 1 },
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE, IntegerPrinterFactory.INSTANCE }, assignDesc);
- AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
- new IPushRuntimeFactory[] { ets, assign, printer },
- new RecordDescriptor[] { etsDesc, assignDesc, null });
+ AlgebricksMetaOperatorDescriptor algebricksOp =
+ new AlgebricksMetaOperatorDescriptor(spec, 0, 0, new IPushRuntimeFactory[] { ets, assign, printer },
+ new RecordDescriptor[] { etsDesc, assignDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp, DEFAULT_NODES);
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
@@ -168,9 +168,9 @@
new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE, IntegerPrinterFactory.INSTANCE }, outFile,
PrinterBasedWriterFactory.INSTANCE, assignDesc);
- AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
- new IPushRuntimeFactory[] { ets, assign, writer },
- new RecordDescriptor[] { etsDesc, assignDesc, null });
+ AlgebricksMetaOperatorDescriptor algebricksOp =
+ new AlgebricksMetaOperatorDescriptor(spec, 0, 0, new IPushRuntimeFactory[] { ets, assign, writer },
+ new RecordDescriptor[] { etsDesc, assignDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp, DEFAULT_NODES);
spec.addRoot(algebricksOp);
AlgebricksHyracksIntegrationUtil.runJob(spec);
@@ -187,28 +187,28 @@
// the scanner
FileSplit[] intFileSplits = new FileSplit[1];
- intFileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
- + "simple" + File.separator + "int-part1.tbl");
+ intFileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
+ "data" + File.separator + "simple" + File.separator + "int-part1.tbl");
IFileSplitProvider intSplitProvider = new ConstantFileSplitProvider(intFileSplits);
- RecordDescriptor intScannerDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor intScannerDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE };
FileScanOperatorDescriptor intScanner = new FileScanOperatorDescriptor(spec, intSplitProvider,
new DelimitedDataTupleParserFactory(valueParsers, '|'), intScannerDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, intScanner, DEFAULT_NODES);
// the algebricks op.
- IScalarEvaluatorFactory cond = new IntegerGreaterThanEvalFactory(new IntegerConstantEvalFactory(2),
- new TupleFieldEvaluatorFactory(0));
+ IScalarEvaluatorFactory cond =
+ new IntegerGreaterThanEvalFactory(new IntegerConstantEvalFactory(2), new TupleFieldEvaluatorFactory(0));
StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(cond, new int[] { 0 },
BinaryBooleanInspectorImpl.FACTORY, false, -1, null);
RecordDescriptor selectDesc = intScannerDesc;
String filePath = PATH_ACTUAL + SEPARATOR + "scanSelectWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- selectDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, selectDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { select, writer }, new RecordDescriptor[] { selectDesc, null });
@@ -240,14 +240,14 @@
RecordDescriptor assignDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
StreamProjectRuntimeFactory project = new StreamProjectRuntimeFactory(new int[] { 1 });
- RecordDescriptor projectDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor projectDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "etsAssignProjectWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- projectDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, projectDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
new IPushRuntimeFactory[] { ets, assign, project, writer },
@@ -270,8 +270,8 @@
// the scanner
FileSplit[] fileSplits = new FileSplit[1];
- fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl");
+ fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl");
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
@@ -290,14 +290,14 @@
// the algebricks op.
StreamLimitRuntimeFactory limit = new StreamLimitRuntimeFactory(new IntegerConstantEvalFactory(2), null,
new int[] { 0 }, BinaryIntegerInspectorImpl.FACTORY);
- RecordDescriptor limitDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor limitDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "scanLimitWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- limitDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, limitDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { limit, writer }, new RecordDescriptor[] { limitDesc, null });
@@ -323,18 +323,18 @@
RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
IUnnestingEvaluatorFactory aggregFactory = new IntArrayUnnester(new int[] { 100, 200, 300 });
UnnestRuntimeFactory unnest = new UnnestRuntimeFactory(0, aggregFactory, new int[] { 0 }, false, null);
- RecordDescriptor unnestDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor unnestDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "etsUnnestWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- unnestDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, unnestDesc);
- AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
- new IPushRuntimeFactory[] { ets, unnest, writer },
- new RecordDescriptor[] { etsDesc, unnestDesc, null });
+ AlgebricksMetaOperatorDescriptor algebricksOp =
+ new AlgebricksMetaOperatorDescriptor(spec, 0, 0, new IPushRuntimeFactory[] { ets, unnest, writer },
+ new RecordDescriptor[] { etsDesc, unnestDesc, null });
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, algebricksOp,
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
spec.addRoot(algebricksOp);
@@ -373,14 +373,14 @@
// the algebricks op.
AggregateRuntimeFactory agg = new AggregateRuntimeFactory(
new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() });
- RecordDescriptor aggDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor aggDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "scanAggregateWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- aggDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, aggDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { agg, writer }, new RecordDescriptor[] { aggDesc, null });
@@ -404,8 +404,8 @@
// the scanner
FileSplit[] fileSplits = new FileSplit[1];
- fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl");
+ fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl");
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
@@ -436,8 +436,8 @@
RecordDescriptor ntsDesc = sortDesc;
AggregateRuntimeFactory agg = new AggregateRuntimeFactory(
new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() });
- RecordDescriptor aggDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor aggDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
AlgebricksPipeline pipeline = new AlgebricksPipeline(new IPushRuntimeFactory[] { nts, agg },
new RecordDescriptor[] { ntsDesc, aggDesc });
NestedPlansAccumulatingAggregatorFactory npaaf = new NestedPlansAccumulatingAggregatorFactory(
@@ -452,18 +452,18 @@
new String[] { AlgebricksHyracksIntegrationUtil.NC1_ID });
// the algebricks op.
- IScalarEvaluatorFactory cond = new IntegerEqualsEvalFactory(new IntegerConstantEvalFactory(3),
- new TupleFieldEvaluatorFactory(0)); // Canadian customers
+ IScalarEvaluatorFactory cond =
+ new IntegerEqualsEvalFactory(new IntegerConstantEvalFactory(3), new TupleFieldEvaluatorFactory(0)); // Canadian customers
StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(cond, new int[] { 1 },
BinaryBooleanInspectorImpl.FACTORY, false, -1, null);
- RecordDescriptor selectDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor selectDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "scanSortGbySelectWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- selectDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, selectDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { select, writer }, new RecordDescriptor[] { selectDesc, null });
@@ -491,8 +491,8 @@
RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
IUnnestingEvaluatorFactory aggregFactory = new IntArrayUnnester(new int[] { 100, 200, 300 });
UnnestRuntimeFactory unnest = new UnnestRuntimeFactory(0, aggregFactory, new int[] { 0 }, false, null);
- RecordDescriptor unnestDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor unnestDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
RunningAggregateRuntimeFactory ragg = new RunningAggregateRuntimeFactory(new int[] { 1 },
new IRunningAggregateEvaluatorFactory[] { new TupleCountRunningAggregateFunctionFactory() },
@@ -502,9 +502,9 @@
String filePath = PATH_ACTUAL + SEPARATOR + "etsUnnestRunningaggregateWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 1 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- raggDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 1 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, raggDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
new IPushRuntimeFactory[] { ets, unnest, ragg, writer },
@@ -584,13 +584,13 @@
String inputFileName = "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl";
- FileSplit[] inputSplits = new FileSplit[] {
- new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFileName) };
+ FileSplit[] inputSplits =
+ new FileSplit[] { new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFileName) };
DelimitedDataTupleParserFactory stringParser = new DelimitedDataTupleParserFactory(
new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, '\u0000');
- RecordDescriptor stringRec = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(), });
+ RecordDescriptor stringRec =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(), });
FileScanOperatorDescriptor scanOp = new FileScanOperatorDescriptor(spec,
new ConstantFileSplitProvider(inputSplits), stringParser, stringRec);
@@ -624,8 +624,8 @@
AlgebricksHyracksIntegrationUtil.runJob(spec);
for (int i = 0; i < outputArity; i++) {
- compareFiles("data" + File.separator + "device0" + File.separator + inputFileName, outputFile[i]
- .getAbsolutePath());
+ compareFiles("data" + File.separator + "device0" + File.separator + inputFileName,
+ outputFile[i].getAbsolutePath());
}
}
@@ -643,8 +643,8 @@
JobSpecification spec = new JobSpecification(FRAME_SIZE);
- String inputFileName[] = { "data" + File.separator + "simple" + File.separator + "int-string-part1.tbl", "data"
- + File.separator + "simple" + File.separator + "int-string-part1-split-0.tbl",
+ String inputFileName[] = { "data" + File.separator + "simple" + File.separator + "int-string-part1.tbl",
+ "data" + File.separator + "simple" + File.separator + "int-string-part1-split-0.tbl",
"data" + File.separator + "simple" + File.separator + "int-string-part1-split-1.tbl" };
File[] inputFiles = new File[inputFileName.length];
for (int i = 0; i < inputFileName.length; i++) {
@@ -657,16 +657,15 @@
outputFile[i] = outputFileSplit[i].getFile(AlgebricksHyracksIntegrationUtil.nc1.getIoManager());
}
- FileSplit[] inputSplits = new FileSplit[] {
- new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFileName[0]) };
+ FileSplit[] inputSplits =
+ new FileSplit[] { new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, inputFileName[0]) };
IFileSplitProvider intSplitProvider = new ConstantFileSplitProvider(inputSplits);
- RecordDescriptor scannerDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() });
- IValueParserFactory[] valueParsers = new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE };
+ IValueParserFactory[] valueParsers =
+ new IValueParserFactory[] { IntegerParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE };
FileScanOperatorDescriptor intScanner = new FileScanOperatorDescriptor(spec, intSplitProvider,
new DelimitedDataTupleParserFactory(valueParsers, '|'), scannerDesc);
@@ -697,8 +696,8 @@
AlgebricksHyracksIntegrationUtil.runJob(spec);
for (int i = 0; i < outputArity; i++) {
- compareFiles("data" + File.separator + "device0" + File.separator + inputFileName[i + 1], outputFile[i]
- .getAbsolutePath());
+ compareFiles("data" + File.separator + "device0" + File.separator + inputFileName[i + 1],
+ outputFile[i].getAbsolutePath());
}
}
@@ -708,8 +707,8 @@
// the scanner
FileSplit[] fileSplits = new FileSplit[1];
- fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "nation.tbl");
+ fileSplits[0] = new ManagedFileSplit(AlgebricksHyracksIntegrationUtil.NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "nation.tbl");
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
RecordDescriptor scannerDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
@@ -761,22 +760,24 @@
EmptyTupleSourceRuntimeFactory ets = new EmptyTupleSourceRuntimeFactory();
RecordDescriptor etsDesc = new RecordDescriptor(new ISerializerDeserializer[] {});
- AssignRuntimeFactory assign1 = new AssignRuntimeFactory(new int[] { 0 },
- new IScalarEvaluatorFactory[] { const1 }, new int[] { 0 });
- RecordDescriptor assign1Desc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ AssignRuntimeFactory assign1 =
+ new AssignRuntimeFactory(new int[] { 0 }, new IScalarEvaluatorFactory[] { const1 }, new int[] { 0 });
+ RecordDescriptor assign1Desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
NestedTupleSourceRuntimeFactory nts = new NestedTupleSourceRuntimeFactory();
- AssignRuntimeFactory assign2 = new AssignRuntimeFactory(new int[] { 1 },
- new IScalarEvaluatorFactory[] { new IntegerAddEvalFactory(new TupleFieldEvaluatorFactory(0), const2) },
- new int[] { 0, 1 });
+ AssignRuntimeFactory assign2 =
+ new AssignRuntimeFactory(new int[] { 1 },
+ new IScalarEvaluatorFactory[] {
+ new IntegerAddEvalFactory(new TupleFieldEvaluatorFactory(0), const2) },
+ new int[] { 0, 1 });
RecordDescriptor assign2Desc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
StreamProjectRuntimeFactory project1 = new StreamProjectRuntimeFactory(new int[] { 1 });
- RecordDescriptor project1Desc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor project1Desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
AlgebricksPipeline pipeline = new AlgebricksPipeline(new IPushRuntimeFactory[] { nts, assign2, project1 },
new RecordDescriptor[] { assign1Desc, assign2Desc, project1Desc });
@@ -788,14 +789,14 @@
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
StreamProjectRuntimeFactory project2 = new StreamProjectRuntimeFactory(new int[] { 1 });
- RecordDescriptor project2Desc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor project2Desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "etsAssignSubplanProjectWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- project2Desc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, project2Desc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 0, 0,
new IPushRuntimeFactory[] { ets, assign1, subplan, project2, writer },
@@ -847,31 +848,33 @@
RecordDescriptor ntsDesc = sortDesc;
AggregateRuntimeFactory agg = new AggregateRuntimeFactory(
new IAggregateEvaluatorFactory[] { new TupleCountAggregateFunctionFactory() });
- RecordDescriptor aggDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor aggDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
AlgebricksPipeline pipeline = new AlgebricksPipeline(new IPushRuntimeFactory[] { nts, agg },
new RecordDescriptor[] { ntsDesc, aggDesc });
NestedPlansAccumulatingAggregatorFactory npaaf = new NestedPlansAccumulatingAggregatorFactory(
new AlgebricksPipeline[] { pipeline }, new int[] { 3 }, new int[] {});
RecordDescriptor gbyDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
- MicroPreClusteredGroupRuntimeFactory gby = new MicroPreClusteredGroupRuntimeFactory(new int[] { 3 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, npaaf,
- sortDesc, gbyDesc, null);
+ MicroPreClusteredGroupRuntimeFactory gby =
+ new MicroPreClusteredGroupRuntimeFactory(new int[] { 3 },
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
+ npaaf, sortDesc, gbyDesc, null);
// the algebricks op.
- IScalarEvaluatorFactory cond = new IntegerEqualsEvalFactory(new IntegerConstantEvalFactory(3),
- new TupleFieldEvaluatorFactory(0)); // Canadian customers
+ IScalarEvaluatorFactory cond =
+ new IntegerEqualsEvalFactory(new IntegerConstantEvalFactory(3), new TupleFieldEvaluatorFactory(0)); // Canadian customers
StreamSelectRuntimeFactory select = new StreamSelectRuntimeFactory(cond, new int[] { 1 },
BinaryBooleanInspectorImpl.FACTORY, false, -1, null);
- RecordDescriptor selectDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+ RecordDescriptor selectDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
String filePath = PATH_ACTUAL + SEPARATOR + "scanSortGbySelectWrite.out";
File outFile = new File(filePath);
- SinkWriterRuntimeFactory writer = new SinkWriterRuntimeFactory(new int[] { 0 },
- new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE }, outFile, PrinterBasedWriterFactory.INSTANCE,
- selectDesc);
+ SinkWriterRuntimeFactory writer =
+ new SinkWriterRuntimeFactory(new int[] { 0 }, new IPrinterFactory[] { IntegerPrinterFactory.INSTANCE },
+ outFile, PrinterBasedWriterFactory.INSTANCE, selectDesc);
AlgebricksMetaOperatorDescriptor algebricksOp = new AlgebricksMetaOperatorDescriptor(spec, 1, 0,
new IPushRuntimeFactory[] { sort, gby, select, writer },
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/tools/WriteValueTest.java b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/tools/WriteValueTest.java
index 6770494..82be109 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/tools/WriteValueTest.java
+++ b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/tools/WriteValueTest.java
@@ -66,8 +66,8 @@
WriteValueTools.writeInt(i, baaos);
byte[] goal = Integer.toString(i).getBytes();
if (baaos.size() != goal.length) {
- throw new Exception("Expecting to write " + i + " in " + goal.length + " bytes, but found " + baaos.size()
- + " bytes.");
+ throw new Exception(
+ "Expecting to write " + i + " in " + goal.length + " bytes, but found " + baaos.size() + " bytes.");
}
for (int k = 0; k < goal.length; k++) {
if (goal[k] != baaos.getByteArray()[k]) {
@@ -82,8 +82,8 @@
WriteValueTools.writeLong(x, baaos);
byte[] goal = Long.toString(x).getBytes();
if (baaos.size() != goal.length) {
- throw new Exception("Expecting to write " + x + " in " + goal.length + " bytes, but found " + baaos.size()
- + " bytes.");
+ throw new Exception(
+ "Expecting to write " + x + " in " + goal.length + " bytes, but found " + baaos.size() + " bytes.");
}
for (int k = 0; k < goal.length; k++) {
if (goal[k] != baaos.getByteArray()[k]) {
@@ -100,8 +100,8 @@
WriteValueTools.writeUTF8StringWithQuotes(str, baaos);
byte[] b = str.getBytes("UTF-8");
if (baaos.size() != b.length + 2) {
- throw new Exception("Expecting to write " + b + " in " + b.length + " bytes, but found " + baaos.size()
- + " bytes.");
+ throw new Exception(
+ "Expecting to write " + b + " in " + b.length + " bytes, but found " + baaos.size() + " bytes.");
}
if (baaos.getByteArray()[0] != '\"' || baaos.getByteArray()[baaos.size() - 1] != '\"') {
throw new Exception("Missing quotes.");
diff --git a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
index 2971b72..1df9824 100644
--- a/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
+++ b/hyracks-fullstack/algebricks/algebricks-tests/src/test/java/org/apache/hyracks/algebricks/tests/util/AlgebricksHyracksIntegrationUtil.java
@@ -65,8 +65,7 @@
ncConfig1.setClusterListenAddress("127.0.0.1");
ncConfig1.setDataListenAddress("127.0.0.1");
ncConfig1.setResultListenAddress("127.0.0.1");
- ncConfig1.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data",
- "device0") });
+ ncConfig1.setIODevices(new String[] { joinPath(System.getProperty("user.dir"), "target", "data", "device0") });
FileUtils.forceMkdir(new File(ncConfig1.getIODevices()[0]));
nc1 = new NodeControllerService(ncConfig1);
nc1.start();
@@ -77,8 +76,7 @@
ncConfig2.setClusterListenAddress("127.0.0.1");
ncConfig2.setDataListenAddress("127.0.0.1");
ncConfig2.setResultListenAddress("127.0.0.1");
- ncConfig2.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data",
- "device1") });
+ ncConfig2.setIODevices(new String[] { joinPath(System.getProperty("user.dir"), "target", "data", "device1") });
FileUtils.forceMkdir(new File(ncConfig1.getIODevices()[0]));
nc2 = new NodeControllerService(ncConfig2);
nc2.start();
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
index cd6362f..e2cd923 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/ActivityClusterGraphBuilder.java
@@ -118,10 +118,10 @@
for (int i = 0; i < nActivityOutputs; ++i) {
IConnectorDescriptor conn = aOutputs.get(i);
ac.addConnector(conn);
- Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> pcPair = jag.getConnectorActivityMap()
- .get(conn.getConnectorId());
- ac.connect(conn, activity, i, pcPair.getRight().getLeft(), pcPair.getRight().getRight(), jag
- .getConnectorRecordDescriptorMap().get(conn.getConnectorId()));
+ Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> pcPair =
+ jag.getConnectorActivityMap().get(conn.getConnectorId());
+ ac.connect(conn, activity, i, pcPair.getRight().getLeft(), pcPair.getRight().getRight(),
+ jag.getConnectorRecordDescriptorMap().get(conn.getConnectorId()));
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
index 64bcf6e..b4d5ba4 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/client/impl/JobActivityGraphBuilder.java
@@ -101,8 +101,8 @@
}
public void finish() {
- Map<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> caMap = jag
- .getConnectorActivityMap();
+ Map<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> caMap =
+ jag.getConnectorActivityMap();
connectorProducerMap
.forEach((cdId, producer) -> caMap.put(cdId, Pair.of(producer, connectorConsumerMap.get(cdId))));
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameConstants.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameConstants.java
index cd74659..04c27be 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameConstants.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameConstants.java
@@ -43,7 +43,7 @@
/**
* Indicate the total size of the meta data.
*/
- int META_DATA_LEN = SIZE_LEN + TUPLE_START_OFFSET;
+ int META_DATA_LEN = SIZE_LEN + TUPLE_START_OFFSET;
boolean DEBUG_FRAME_IO = false;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameHelper.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameHelper.java
index 68533c6..1242ba0 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/FrameHelper.java
@@ -38,7 +38,8 @@
}
public static void serializeFrameSize(ByteBuffer outputFrame, int start, int numberOfMinFrame) {
- IntSerDeUtils.putInt(outputFrame.array(), start + FrameConstants.META_DATA_FRAME_COUNT_OFFSET, numberOfMinFrame);
+ IntSerDeUtils.putInt(outputFrame.array(), start + FrameConstants.META_DATA_FRAME_COUNT_OFFSET,
+ numberOfMinFrame);
}
public static int deserializeNumOfMinFrame(ByteBuffer frame) {
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/IFrameTupleAppender.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/IFrameTupleAppender.java
index 64fa322..9a2db6f 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/IFrameTupleAppender.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/comm/IFrameTupleAppender.java
@@ -33,11 +33,11 @@
boolean append(IFrameTupleAccessor tupleAccessor, int tStartOffset, int tEndOffset) throws HyracksDataException;
- boolean appendConcat(IFrameTupleAccessor accessor0, int tIndex0, IFrameTupleAccessor accessor1,
- int tIndex1) throws HyracksDataException;
+ boolean appendConcat(IFrameTupleAccessor accessor0, int tIndex0, IFrameTupleAccessor accessor1, int tIndex1)
+ throws HyracksDataException;
- boolean appendConcat(IFrameTupleAccessor accessor0, int tIndex0, int[] fieldSlots1, byte[] bytes1,
- int offset1, int dataLen1) throws HyracksDataException;
+ boolean appendConcat(IFrameTupleAccessor accessor0, int tIndex0, int[] fieldSlots1, byte[] bytes1, int offset1,
+ int dataLen1) throws HyracksDataException;
boolean appendProjection(IFrameTupleAccessor accessor, int tIndex, int[] fields) throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
index d42cbb3..402f02e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IApplicationConfig.java
@@ -60,38 +60,38 @@
}
default long getLong(IOption option) {
- return (long)get(option);
+ return (long) get(option);
}
default int getInt(IOption option) {
- return (int)get(option);
+ return (int) get(option);
}
default short getShort(IOption option) {
- return (short)get(option);
+ return (short) get(option);
}
default String getString(IOption option) {
- return (String)get(option);
+ return (String) get(option);
}
default boolean getBoolean(IOption option) {
- return (boolean)get(option);
+ return (boolean) get(option);
}
default Level getLoggingLevel(IOption option) {
- return (Level)get(option);
+ return (Level) get(option);
}
default double getDouble(IOption option) {
- return (double)get(option);
+ return (double) get(option);
}
- default String [] getStringArray(IOption option) {
- return (String [])get(option);
+ default String[] getStringArray(IOption option) {
+ return (String[]) get(option);
}
default URL getURL(IOption option) {
- return (URL)get(option);
+ return (URL) get(option);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOption.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOption.java
index ed6dcd0..5f11214 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOption.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/config/IOption.java
@@ -56,7 +56,9 @@
/**
* @return a true value indicates this option should not be advertised (e.g. command-line usage, documentation)
*/
- default boolean hidden() { return false; }
+ default boolean hidden() {
+ return false;
+ }
default String cmdline() {
return "-" + name().toLowerCase().replace("_", "-");
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java
index 95f5e1a..762fdee 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/constraints/PartitionConstraintHelper.java
@@ -26,8 +26,8 @@
public class PartitionConstraintHelper {
public static void addPartitionCountConstraint(JobSpecification spec, IOperatorDescriptor op, int count) {
- spec.addUserConstraint(new Constraint(new PartitionCountExpression(op.getOperatorId()), new ConstantExpression(
- count)));
+ spec.addUserConstraint(
+ new Constraint(new PartitionCountExpression(op.getOperatorId()), new ConstantExpression(count)));
}
public static void addAbsoluteLocationConstraint(JobSpecification spec, IOperatorDescriptor op,
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/context/IHyracksCommonContext.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/context/IHyracksCommonContext.java
index 5afcf69..1206d8e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/context/IHyracksCommonContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/context/IHyracksCommonContext.java
@@ -20,7 +20,7 @@
import org.apache.hyracks.api.io.IIOManager;
-public interface IHyracksCommonContext extends IHyracksFrameMgrContext{
+public interface IHyracksCommonContext extends IHyracksFrameMgrContext {
public IIOManager getIoManager();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/JSONSerializable.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/JSONSerializable.java
index 23ae97b..8b514f0 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/JSONSerializable.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/dataflow/value/JSONSerializable.java
@@ -18,7 +18,6 @@
*/
package org.apache.hyracks.api.dataflow.value;
-
import com.fasterxml.jackson.databind.node.ObjectNode;
public interface JSONSerializable {
@@ -27,5 +26,5 @@
*
* @return A om.createObjectNode instance representing this Java object.
*/
- public ObjectNode toJSON() ;
+ public ObjectNode toJSON();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IODeviceHandle.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IODeviceHandle.java
index 34c58f8..a2d28e1 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IODeviceHandle.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/io/IODeviceHandle.java
@@ -48,8 +48,7 @@
public IODeviceHandle(File mount, String workspace) {
this.mount = mount;
this.workspace = workspace == null ? null
- : workspace.endsWith(File.separator) ? workspace.substring(0, workspace.length() - 1)
- : workspace;
+ : workspace.endsWith(File.separator) ? workspace.substring(0, workspace.length() - 1) : workspace;
}
public File getMount() {
@@ -78,7 +77,7 @@
* comma separated list of devices
* @return
*/
- public static List<IODeviceHandle> getDevices(String [] ioDevices) {
+ public static List<IODeviceHandle> getDevices(String[] ioDevices) {
List<IODeviceHandle> devices = new ArrayList<>();
for (String ioDevice : ioDevices) {
String devPath = ioDevice.trim();
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityCluster.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityCluster.java
index e5fad32..94e9c74 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityCluster.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityCluster.java
@@ -74,7 +74,8 @@
connectorRecordDescriptorMap = new HashMap<ConnectorDescriptorId, RecordDescriptor>();
activityInputMap = new HashMap<ActivityId, List<IConnectorDescriptor>>();
activityOutputMap = new HashMap<ActivityId, List<IConnectorDescriptor>>();
- connectorActivityMap = new HashMap<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
+ connectorActivityMap =
+ new HashMap<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
blocked2blockerMap = new HashMap<ActivityId, Set<ActivityId>>();
dependencies = new ArrayList<ActivityCluster>();
}
@@ -108,8 +109,7 @@
}
insertIntoIndexedMap(activityInputMap, consumerActivity.getActivityId(), consumerPort, connector);
insertIntoIndexedMap(activityOutputMap, producerActivity.getActivityId(), producerPort, connector);
- connectorActivityMap.put(
- connector.getConnectorId(),
+ connectorActivityMap.put(connector.getConnectorId(),
Pair.<Pair<IActivity, Integer>, Pair<IActivity, Integer>> of(
Pair.<IActivity, Integer> of(producerActivity, producerPort),
Pair.<IActivity, Integer> of(consumerActivity, consumerPort)));
@@ -187,7 +187,7 @@
vList.set(index, value);
}
- public JsonNode toJSON() {
+ public JsonNode toJSON() {
ObjectMapper om = new ObjectMapper();
ArrayNode jans = om.createArrayNode();
ObjectNode jac = om.createObjectNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityClusterGraph.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityClusterGraph.java
index b64e2d5..5816c8f 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityClusterGraph.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/ActivityClusterGraph.java
@@ -161,7 +161,7 @@
return ac.getProducerActivity(cid);
}
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode acgj = om.createObjectNode();
ArrayNode acl = om.createArrayNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/IActivityClusterGraphGeneratorFactory.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/IActivityClusterGraphGeneratorFactory.java
index d23b944..bfa126a 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/IActivityClusterGraphGeneratorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/IActivityClusterGraphGeneratorFactory.java
@@ -25,8 +25,8 @@
import org.apache.hyracks.api.exceptions.HyracksException;
public interface IActivityClusterGraphGeneratorFactory extends Serializable {
- public IActivityClusterGraphGenerator createActivityClusterGraphGenerator(
- ICCServiceContext ccServiceCtx, Set<JobFlag> jobFlags) throws HyracksException;
+ public IActivityClusterGraphGenerator createActivityClusterGraphGenerator(ICCServiceContext ccServiceCtx,
+ Set<JobFlag> jobFlags) throws HyracksException;
public JobSpecification getJobSpecification();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobActivityGraph.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobActivityGraph.java
index de2759c..19d8484 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobActivityGraph.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobActivityGraph.java
@@ -55,7 +55,8 @@
connectorRecordDescriptorMap = new HashMap<ConnectorDescriptorId, RecordDescriptor>();
activityInputMap = new HashMap<ActivityId, List<IConnectorDescriptor>>();
activityOutputMap = new HashMap<ActivityId, List<IConnectorDescriptor>>();
- connectorActivityMap = new HashMap<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
+ connectorActivityMap =
+ new HashMap<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>>();
blocked2blockerMap = new HashMap<ActivityId, Set<ActivityId>>();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobInfo.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobInfo.java
index 713219e..2073728 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobInfo.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobInfo.java
@@ -33,7 +33,8 @@
private final Map<OperatorDescriptorId, Map<Integer, String>> operatorLocations;
- public JobInfo(JobId jobId, JobStatus jobStatus, Map<OperatorDescriptorId, Map<Integer, String>> operatorLocations) {
+ public JobInfo(JobId jobId, JobStatus jobStatus,
+ Map<OperatorDescriptorId, Map<Integer, String>> operatorLocations) {
this.jobId = jobId;
this.operatorLocations = operatorLocations;
this.status = jobStatus;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSerializerDeserializerContainer.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSerializerDeserializerContainer.java
index d8c9a9c..7f3194e 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSerializerDeserializerContainer.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/job/JobSerializerDeserializerContainer.java
@@ -27,7 +27,8 @@
public class JobSerializerDeserializerContainer implements IJobSerializerDeserializerContainer {
private IJobSerializerDeserializer defaultJobSerDe = new JobSerializerDeserializer();
- private Map<DeploymentId, IJobSerializerDeserializer> jobSerializerDeserializerMap = new ConcurrentHashMap<DeploymentId, IJobSerializerDeserializer>();
+ private Map<DeploymentId, IJobSerializerDeserializer> jobSerializerDeserializerMap =
+ new ConcurrentHashMap<DeploymentId, IJobSerializerDeserializer>();
@Override
public synchronized IJobSerializerDeserializer getJobSerializerDeserializer(DeploymentId deploymentId) {
@@ -39,7 +40,8 @@
}
@Override
- public synchronized void addJobSerializerDeserializer(DeploymentId deploymentId, IJobSerializerDeserializer jobSerDe) {
+ public synchronized void addJobSerializerDeserializer(DeploymentId deploymentId,
+ IJobSerializerDeserializer jobSerDe) {
jobSerializerDeserializerMap.put(deploymentId, jobSerDe);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
index 7cdb300..f3059c1 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/ActivityClusterGraphRewriter.java
@@ -48,8 +48,8 @@
* @author yingyib
*/
public class ActivityClusterGraphRewriter {
- private static final String ONE_TO_ONE_CONNECTOR = "org.apache.hyracks.dataflow.std.connectors."
- + "OneToOneConnectorDescriptor";
+ private static final String ONE_TO_ONE_CONNECTOR =
+ "org.apache.hyracks.dataflow.std.connectors." + "OneToOneConnectorDescriptor";
/**
* rewrite an activity cluster graph to eliminate
@@ -90,8 +90,8 @@
replacedBlockers = new HashSet<>();
for (ActivityId blocker : blockers) {
replacedBlockers.add(invertedAid2SuperAidMap.get(blocker));
- ActivityCluster dependingAc = ac.getActivityClusterGraph().getActivityMap()
- .get(invertedAid2SuperAidMap.get(blocker));
+ ActivityCluster dependingAc =
+ ac.getActivityClusterGraph().getActivityMap().get(invertedAid2SuperAidMap.get(blocker));
if (!ac.getDependencies().contains(dependingAc)) {
ac.getDependencies().add(dependingAc);
}
@@ -122,8 +122,8 @@
Map<ActivityId, IActivity> activities = ac.getActivityMap();
Map<ActivityId, List<IConnectorDescriptor>> activityInputMap = ac.getActivityInputMap();
Map<ActivityId, List<IConnectorDescriptor>> activityOutputMap = ac.getActivityOutputMap();
- Map<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> connectorActivityMap = ac
- .getConnectorActivityMap();
+ Map<ConnectorDescriptorId, Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>>> connectorActivityMap =
+ ac.getConnectorActivityMap();
ActivityClusterGraph acg = ac.getActivityClusterGraph();
Map<ActivityId, IActivity> startActivities = new HashMap<>();
Map<ActivityId, SuperActivity> superActivities = new HashMap<>();
@@ -177,8 +177,8 @@
List<IConnectorDescriptor> outputConnectors = activityOutputMap.get(expendingActivity.getActivityId());
if (outputConnectors != null) {
for (IConnectorDescriptor outputConn : outputConnectors) {
- Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> endPoints = connectorActivityMap
- .get(outputConn.getConnectorId());
+ Pair<Pair<IActivity, Integer>, Pair<IActivity, Integer>> endPoints =
+ connectorActivityMap.get(outputConn.getConnectorId());
IActivity newActivity = endPoints.getRight().getLeft();
SuperActivity existingSuperActivity = invertedActivitySuperActivityMap.get(newActivity);
if (outputConn.getClass().getName().contains(ONE_TO_ONE_CONNECTOR)) {
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/OneToOneConnectedActivityCluster.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/OneToOneConnectedActivityCluster.java
index 68041bb..a93cb17 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/OneToOneConnectedActivityCluster.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/rewriter/OneToOneConnectedActivityCluster.java
@@ -38,10 +38,14 @@
private static final long serialVersionUID = 1L;
- protected final Map<Integer, Pair<ActivityId, Integer>> clusterInputIndexMap = new HashMap<Integer, Pair<ActivityId, Integer>>();
- protected final Map<Integer, Pair<ActivityId, Integer>> clusterOutputIndexMap = new HashMap<Integer, Pair<ActivityId, Integer>>();
- protected final Map<Pair<ActivityId, Integer>, Integer> invertedClusterOutputIndexMap = new HashMap<Pair<ActivityId, Integer>, Integer>();
- protected final Map<Pair<ActivityId, Integer>, Integer> invertedClusterInputIndexMap = new HashMap<Pair<ActivityId, Integer>, Integer>();
+ protected final Map<Integer, Pair<ActivityId, Integer>> clusterInputIndexMap =
+ new HashMap<Integer, Pair<ActivityId, Integer>>();
+ protected final Map<Integer, Pair<ActivityId, Integer>> clusterOutputIndexMap =
+ new HashMap<Integer, Pair<ActivityId, Integer>>();
+ protected final Map<Pair<ActivityId, Integer>, Integer> invertedClusterOutputIndexMap =
+ new HashMap<Pair<ActivityId, Integer>, Integer>();
+ protected final Map<Pair<ActivityId, Integer>, Integer> invertedClusterInputIndexMap =
+ new HashMap<Pair<ActivityId, Integer>, Integer>();
public OneToOneConnectedActivityCluster(ActivityClusterGraph acg, ActivityClusterId id) {
super(acg, id);
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/topology/TopologyDefinitionParser.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/topology/TopologyDefinitionParser.java
index 57330af..0182e0c 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/topology/TopologyDefinitionParser.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/topology/TopologyDefinitionParser.java
@@ -74,9 +74,10 @@
public void endElement(String uri, String localName, String qName) throws SAXException {
if ("network-switch".equals(localName) || "terminal".equals(localName)) {
ElementStackEntry e = stack.pop();
- NetworkEndpoint endpoint = e.type == EndpointType.NETWORK_SWITCH ? new NetworkSwitch(e.name,
- e.properties, e.ports.toArray(new NetworkSwitch.Port[e.ports.size()])) : new NetworkTerminal(
- e.name, e.properties);
+ NetworkEndpoint endpoint = e.type == EndpointType.NETWORK_SWITCH
+ ? new NetworkSwitch(e.name, e.properties,
+ e.ports.toArray(new NetworkSwitch.Port[e.ports.size()]))
+ : new NetworkTerminal(e.name, e.properties);
stack.peek().ports.add(new NetworkSwitch.Port(endpoint));
} else if ("property".equals(localName)) {
if (!inPropertyElement) {
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
index e5eec11..f396be9 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExecutionTimeProfiler.java
@@ -31,7 +31,6 @@
private StringBuilder sb;
private Object lock1 = new Object();
-
// [Key: Job, Value: [Key: Operator, Value: Duration of each operators]]
private HashMap<String, LinkedHashMap<String, String>> spentTimePerJobMap;
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExperimentProfilerUtils.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExperimentProfilerUtils.java
index 2305573..abadde3 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExperimentProfilerUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/ExperimentProfilerUtils.java
@@ -25,8 +25,8 @@
import java.io.OutputStream;
public class ExperimentProfilerUtils {
- public static void printToOutputFile(StringBuffer sb, FileOutputStream fos) throws IllegalStateException,
- IOException {
+ public static void printToOutputFile(StringBuffer sb, FileOutputStream fos)
+ throws IllegalStateException, IOException {
fos.write(sb.toString().getBytes());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/OperatorExecutionTimeProfiler.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/OperatorExecutionTimeProfiler.java
index 55c7915..02a1226 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/OperatorExecutionTimeProfiler.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/OperatorExecutionTimeProfiler.java
@@ -30,8 +30,8 @@
if (ExecutionTimeProfiler.PROFILE_MODE) {
//SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss-SSS");
try {
- executionTimeProfiler = new ExecutionTimeProfiler(profileHomeDir + "executionTime-"
- + Inet4Address.getLocalHost().getHostAddress() + ".txt");
+ executionTimeProfiler = new ExecutionTimeProfiler(
+ profileHomeDir + "executionTime-" + Inet4Address.getLocalHost().getHostAddress() + ".txt");
} catch (UnknownHostException e) {
e.printStackTrace();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SpatialIndexProfiler.java b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SpatialIndexProfiler.java
index a1b40d0..899d3ee 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SpatialIndexProfiler.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/main/java/org/apache/hyracks/api/util/SpatialIndexProfiler.java
@@ -41,8 +41,9 @@
}
falsePositivePerQuery.begin();
try {
- cacheMissPerQuery = new ExperimentProfiler(PROFILE_HOME_DIR + "cacheMissPerQuery-"
- + Inet4Address.getLocalHost().getHostAddress() + ".txt", 1);
+ cacheMissPerQuery = new ExperimentProfiler(
+ PROFILE_HOME_DIR + "cacheMissPerQuery-" + Inet4Address.getLocalHost().getHostAddress() + ".txt",
+ 1);
} catch (UnknownHostException e) {
// TODO Auto-generated catch block
e.printStackTrace();
diff --git a/hyracks-fullstack/hyracks/hyracks-api/src/test/java/org/apache/hyracks/api/job/JobIdFactoryTest.java b/hyracks-fullstack/hyracks/hyracks-api/src/test/java/org/apache/hyracks/api/job/JobIdFactoryTest.java
index d16eb15..709f098 100644
--- a/hyracks-fullstack/hyracks/hyracks-api/src/test/java/org/apache/hyracks/api/job/JobIdFactoryTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-api/src/test/java/org/apache/hyracks/api/job/JobIdFactoryTest.java
@@ -74,7 +74,7 @@
JobIdFactory factory = new JobIdFactory(ccId);
AtomicLong theId = (AtomicLong) idField.get(factory);
Assert.assertEquals(expected, theId.get());
- theId.set((((long)1 << 48) - 1) | expected);
+ theId.set((((long) 1 << 48) - 1) | expected);
JobId jobId = factory.create();
Assert.assertEquals(ccId, jobId.getCcId());
Assert.assertEquals(JobId.MAX_ID, jobId.getIdOnly());
diff --git a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java
index 4310cd0..7eeb913 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetDirectoryServiceInterfaceRemoteProxy.java
@@ -40,16 +40,16 @@
@Override
public Status getDatasetResultStatus(JobId jobId, ResultSetId rsId) throws Exception {
- HyracksClientInterfaceFunctions.GetDatasetResultStatusFunction gdrlf = new HyracksClientInterfaceFunctions.GetDatasetResultStatusFunction(
- jobId, rsId);
+ HyracksClientInterfaceFunctions.GetDatasetResultStatusFunction gdrlf =
+ new HyracksClientInterfaceFunctions.GetDatasetResultStatusFunction(jobId, rsId);
return (Status) rpci.call(ipcHandle, gdrlf);
}
@Override
public DatasetDirectoryRecord[] getDatasetResultLocations(JobId jobId, ResultSetId rsId,
DatasetDirectoryRecord[] knownRecords) throws Exception {
- HyracksClientInterfaceFunctions.GetDatasetResultLocationsFunction gdrlf = new HyracksClientInterfaceFunctions.GetDatasetResultLocationsFunction(
- jobId, rsId, knownRecords);
+ HyracksClientInterfaceFunctions.GetDatasetResultLocationsFunction gdrlf =
+ new HyracksClientInterfaceFunctions.GetDatasetResultLocationsFunction(jobId, rsId, knownRecords);
return (DatasetDirectoryRecord[]) rpci.call(ipcHandle, gdrlf);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java
index fc5708d..e7c9042 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/dataset/HyracksDatasetReader.java
@@ -75,8 +75,7 @@
public HyracksDatasetReader(IHyracksDatasetDirectoryServiceConnection datasetDirectoryServiceConnection,
ClientNetworkManager netManager, IHyracksCommonContext datasetClientCtx, JobId jobId,
- ResultSetId resultSetId)
- throws Exception {
+ ResultSetId resultSetId) throws Exception {
this.datasetDirectoryServiceConnection = datasetDirectoryServiceConnection;
this.netManager = netManager;
this.datasetClientCtx = datasetClientCtx;
@@ -105,8 +104,8 @@
private DatasetDirectoryRecord getRecord(int partition) throws Exception {
while (knownRecords == null || knownRecords[partition] == null) {
- knownRecords = datasetDirectoryServiceConnection
- .getDatasetResultLocations(jobId, resultSetId, knownRecords);
+ knownRecords =
+ datasetDirectoryServiceConnection.getDatasetResultLocations(jobId, resultSetId, knownRecords);
}
return knownRecords[partition];
}
@@ -157,7 +156,7 @@
readBuffer = resultChannel.getNextBuffer();
lastMonitor.notifyFrameRead();
if (readBuffer != null) {
- if (readSize <=0) {
+ if (readSize <= 0) {
int nBlocks = FrameHelper.deserializeNumOfMinFrame(readBuffer);
frame.ensureFrameSize(frame.getMinSize() * nBlocks);
frame.getBuffer().clear();
diff --git a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/stats/impl/ClientCounterContext.java b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/stats/impl/ClientCounterContext.java
index 5d0865c..9e87f52 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/stats/impl/ClientCounterContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-client/src/main/java/org/apache/hyracks/client/stats/impl/ClientCounterContext.java
@@ -39,9 +39,9 @@
* @author yingyib
*/
public class ClientCounterContext implements IClusterCounterContext {
- private static String[] RESET_COUNTERS = { Counters.NETWORK_IO_READ, Counters.NETWORK_IO_WRITE,
- Counters.MEMORY_USAGE, Counters.MEMORY_MAX, Counters.DISK_READ, Counters.DISK_WRITE,
- Counters.NUM_PROCESSOR };
+ private static String[] RESET_COUNTERS =
+ { Counters.NETWORK_IO_READ, Counters.NETWORK_IO_WRITE, Counters.MEMORY_USAGE, Counters.MEMORY_MAX,
+ Counters.DISK_READ, Counters.DISK_WRITE, Counters.NUM_PROCESSOR };
private static String[] AGG_COUNTERS = { Counters.SYSTEM_LOAD };
private static int UPDATE_INTERVAL = 10000;
diff --git a/hyracks-fullstack/hyracks/hyracks-client/src/test/java/org/apache/hyracks/client/stats/ClientCounterContextTest.java b/hyracks-fullstack/hyracks/hyracks-client/src/test/java/org/apache/hyracks/client/stats/ClientCounterContextTest.java
index ee33a6d..8bac8e7 100644
--- a/hyracks-fullstack/hyracks/hyracks-client/src/test/java/org/apache/hyracks/client/stats/ClientCounterContextTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-client/src/test/java/org/apache/hyracks/client/stats/ClientCounterContextTest.java
@@ -37,9 +37,9 @@
synchronized (this) {
wait(20000);
}
- String[] counters = { Counters.MEMORY_USAGE, Counters.MEMORY_MAX, Counters.NETWORK_IO_READ,
- Counters.NETWORK_IO_WRITE, Counters.SYSTEM_LOAD, Counters.NUM_PROCESSOR, Counters.DISK_READ,
- Counters.DISK_WRITE };
+ String[] counters =
+ { Counters.MEMORY_USAGE, Counters.MEMORY_MAX, Counters.NETWORK_IO_READ, Counters.NETWORK_IO_WRITE,
+ Counters.SYSTEM_LOAD, Counters.NUM_PROCESSOR, Counters.DISK_READ, Counters.DISK_WRITE };
for (String counterName : counters) {
ICounter counter = ccContext.getCounter(counterName, false);
System.out.println(counterName + ": " + counter.get());
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java
index ccf798a..e46aa7f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClientInterfaceIPCI.java
@@ -94,9 +94,8 @@
case DESTROY_JOB:
HyracksClientInterfaceFunctions.UndeployJobSpecFunction dsjf =
(HyracksClientInterfaceFunctions.UndeployJobSpecFunction) fn;
- ccs.getWorkQueue()
- .schedule(new UndeployJobSpecWork(ccs, dsjf.getDeployedJobSpecId(),
- new IPCResponder<>(handle, mid)));
+ ccs.getWorkQueue().schedule(
+ new UndeployJobSpecWork(ccs, dsjf.getDeployedJobSpecId(), new IPCResponder<>(handle, mid)));
break;
case CANCEL_JOB:
HyracksClientInterfaceFunctions.CancelJobFunction cjf =
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java
index ad0cb61..06c92dd 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerIPCI.java
@@ -101,8 +101,7 @@
case DEPLOYED_JOB_FAILURE:
CCNCFunctions.ReportDeployedJobSpecFailureFunction rdjf =
(CCNCFunctions.ReportDeployedJobSpecFailureFunction) fn;
- ccs.getWorkQueue()
- .schedule(new DeployedJobFailureWork(rdjf.getDeployedJobSpecId(), rdjf.getNodeId()));
+ ccs.getWorkQueue().schedule(new DeployedJobFailureWork(rdjf.getDeployedJobSpecId(), rdjf.getNodeId()));
break;
case REGISTER_PARTITION_PROVIDER:
CCNCFunctions.RegisterPartitionProviderFunction rppf =
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
index 1ec7485..a6edd70 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/ClusterControllerService.java
@@ -171,8 +171,8 @@
final ClusterTopology topology = computeClusterTopology(ccConfig);
ccContext = new ClusterControllerContext(topology);
sweeper = new DeadNodeSweeper();
- datasetDirectoryService = new DatasetDirectoryService(ccConfig.getResultTTL(),
- ccConfig.getResultSweepThreshold());
+ datasetDirectoryService =
+ new DatasetDirectoryService(ccConfig.getResultTTL(), ccConfig.getResultSweepThreshold());
deploymentRunMap = new HashMap<>();
stateDumpRunMap = new HashMap<>();
@@ -208,9 +208,9 @@
clusterIPC = new IPCSystem(new InetSocketAddress(ccConfig.getClusterListenPort()), ccIPCI,
new CCNCFunctions.SerializerDeserializer());
IIPCI ciIPCI = new ClientInterfaceIPCI(this, jobIdFactory);
- clientIPC = new IPCSystem(
- new InetSocketAddress(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort()), ciIPCI,
- new JavaSerializationBasedPayloadSerializerDeserializer());
+ clientIPC =
+ new IPCSystem(new InetSocketAddress(ccConfig.getClientListenAddress(), ccConfig.getClientListenPort()),
+ ciIPCI, new JavaSerializationBasedPayloadSerializerDeserializer());
webServer = new WebServer(this, ccConfig.getConsoleListenPort());
clusterIPC.start();
clientIPC.start();
@@ -238,9 +238,9 @@
// Job manager is in charge of job lifecycle management.
try {
- Constructor<?> jobManagerConstructor = this.getClass().getClassLoader()
- .loadClass(ccConfig.getJobManagerClass())
- .getConstructor(CCConfig.class, ClusterControllerService.class, IJobCapacityController.class);
+ Constructor<?> jobManagerConstructor =
+ this.getClass().getClassLoader().loadClass(ccConfig.getJobManagerClass()).getConstructor(
+ CCConfig.class, ClusterControllerService.class, IJobCapacityController.class);
jobManager = (IJobManager) jobManagerConstructor.newInstance(ccConfig, this, jobCapacityController);
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | NoSuchMethodException
| InvocationTargetException e) {
@@ -272,8 +272,8 @@
private void connectNCs() {
getNCServices().forEach((key, value) -> {
- final TriggerNCWork triggerWork = new TriggerNCWork(ClusterControllerService.this, value.getHostString(),
- value.getPort(), key);
+ final TriggerNCWork triggerWork =
+ new TriggerNCWork(ClusterControllerService.this, value.getHostString(), value.getPort(), key);
executor.submit(triggerWork);
});
}
@@ -428,8 +428,8 @@
@Override
public void getIPAddressNodeMap(Map<InetAddress, Set<String>> map) throws HyracksDataException {
- GetIpAddressNodeNameMapWork ginmw = new GetIpAddressNodeNameMapWork(
- ClusterControllerService.this.getNodeManager(), map);
+ GetIpAddressNodeNameMapWork ginmw =
+ new GetIpAddressNodeNameMapWork(ClusterControllerService.this.getNodeManager(), map);
try {
workQueue.scheduleAndSync(ginmw);
} catch (Exception e) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/DeployedJobSpecStore.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/DeployedJobSpecStore.java
index 1a3051e..0e22c25 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/DeployedJobSpecStore.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/DeployedJobSpecStore.java
@@ -38,9 +38,8 @@
}
public void addDeployedJobSpecDescriptor(DeployedJobSpecId deployedJobSpecId,
- ActivityClusterGraph activityClusterGraph,
- JobSpecification jobSpecification, Set<Constraint> activityClusterGraphConstraints)
- throws HyracksException {
+ ActivityClusterGraph activityClusterGraph, JobSpecification jobSpecification,
+ Set<Constraint> activityClusterGraphConstraints) throws HyracksException {
if (deployedJobSpecDescriptorMap.get(deployedJobSpecId) != null) {
throw HyracksException.create(ErrorCode.DUPLICATE_DEPLOYED_JOB, deployedJobSpecId);
}
@@ -80,8 +79,8 @@
private final Set<Constraint> activityClusterGraphConstraints;
- private DeployedJobSpecDescriptor(ActivityClusterGraph activityClusterGraph,
- JobSpecification jobSpecification, Set<Constraint> activityClusterGraphConstraints) {
+ private DeployedJobSpecDescriptor(ActivityClusterGraph activityClusterGraph, JobSpecification jobSpecification,
+ Set<Constraint> activityClusterGraphConstraints) {
this.activityClusterGraph = activityClusterGraph;
this.jobSpecification = jobSpecification;
this.activityClusterGraphConstraints = activityClusterGraphConstraints;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
index 742e2e0..98cf67a 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/cluster/NodeManager.java
@@ -98,8 +98,7 @@
}
// Updates the node registry.
if (nodeRegistry.containsKey(nodeId)) {
- LOGGER.warn(
- "Node with name " + nodeId + " has already registered; failing the node then re-registering.");
+ LOGGER.warn("Node with name " + nodeId + " has already registered; failing the node then re-registering.");
removeDeadNode(nodeId);
} else {
try {
@@ -150,8 +149,8 @@
Set<String> deadNodes = new HashSet<>();
Set<JobId> affectedJobIds = new HashSet<>();
Iterator<Map.Entry<String, NodeControllerState>> nodeIterator = nodeRegistry.entrySet().iterator();
- long deadNodeNanosThreshold = TimeUnit.MILLISECONDS
- .toNanos(ccConfig.getHeartbeatMaxMisses() * ccConfig.getHeartbeatPeriodMillis());
+ long deadNodeNanosThreshold =
+ TimeUnit.MILLISECONDS.toNanos(ccConfig.getHeartbeatMaxMisses() * ccConfig.getHeartbeatPeriodMillis());
while (nodeIterator.hasNext()) {
Map.Entry<String, NodeControllerState> entry = nodeIterator.next();
String nodeId = entry.getKey();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
index ea37cdd..3fe88bf 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/executor/ActivityClusterPlanner.java
@@ -102,12 +102,10 @@
ActivityCluster dAC = ac.getActivityClusterGraph().getActivityMap().get(danId);
ActivityClusterPlan dACP = jobRun.getActivityClusterPlanMap().get(dAC.getId());
assert dACP != null : "IllegalStateEncountered: Dependent AC is being planned without a plan for "
- + "dependency AC: Encountered no plan for ActivityID "
- + danId;
+ + "dependency AC: Encountered no plan for ActivityID " + danId;
Task[] dATasks = dACP.getActivityPlanMap().get(danId).getTasks();
assert dATasks != null : "IllegalStateEncountered: Dependent AC is being planned without a plan for"
- + " dependency AC: Encountered no plan for ActivityID "
- + danId;
+ + " dependency AC: Encountered no plan for ActivityID " + danId;
assert dATasks.length == tasks.length : "Dependency activity partitioned differently from "
+ "dependent: " + dATasks.length + " != " + tasks.length;
Task dTask = dATasks[i];
@@ -125,8 +123,8 @@
private TaskCluster[] computeTaskClusters(ActivityCluster ac, JobRun jobRun,
Map<ActivityId, ActivityPlan> activityPlanMap) {
Set<ActivityId> activities = ac.getActivityMap().keySet();
- Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity = computeTaskConnectivity(jobRun,
- activityPlanMap, activities);
+ Map<TaskId, List<Pair<TaskId, ConnectorDescriptorId>>> taskConnectivity =
+ computeTaskConnectivity(jobRun, activityPlanMap, activities);
TaskCluster[] taskClusters = ac.getActivityClusterGraph().isUseConnectorPolicyForScheduling()
? buildConnectorPolicyAwareTaskClusters(ac, activityPlanMap, taskConnectivity)
@@ -139,13 +137,13 @@
List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity.get(tid);
if (cInfoList != null) {
for (Pair<TaskId, ConnectorDescriptorId> p : cInfoList) {
- Task targetTS = activityPlanMap.get(p.getLeft().getActivityId()).getTasks()[p.getLeft()
- .getPartition()];
+ Task targetTS =
+ activityPlanMap.get(p.getLeft().getActivityId()).getTasks()[p.getLeft().getPartition()];
TaskCluster targetTC = targetTS.getTaskCluster();
if (targetTC != tc) {
ConnectorDescriptorId cdId = p.getRight();
- PartitionId pid = new PartitionId(jobRun.getJobId(), cdId, tid.getPartition(), p.getLeft()
- .getPartition());
+ PartitionId pid = new PartitionId(jobRun.getJobId(), cdId, tid.getPartition(),
+ p.getLeft().getPartition());
tc.getProducedPartitions().add(pid);
targetTC.getRequiredPartitions().add(pid);
partitionProducingTaskClusterMap.put(pid, tc);
@@ -170,8 +168,8 @@
Task[] tasks = ap.getTasks();
taskStates.addAll(Arrays.asList(tasks));
}
- TaskCluster tc = new TaskCluster(new TaskClusterId(ac.getId(), 0), ac, taskStates.toArray(new Task[taskStates
- .size()]));
+ TaskCluster tc =
+ new TaskCluster(new TaskClusterId(ac.getId(), 0), ac, taskStates.toArray(new Task[taskStates.size()]));
for (Task t : tc.getTasks()) {
t.setTaskCluster(tc);
}
@@ -209,8 +207,8 @@
}
for (int i = 0; i < nProducers; ++i) {
c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
- List<Pair<TaskId, ConnectorDescriptorId>> cInfoList = taskConnectivity
- .get(ac1TaskStates[i].getTaskId());
+ List<Pair<TaskId, ConnectorDescriptorId>> cInfoList =
+ taskConnectivity.get(ac1TaskStates[i].getTaskId());
if (cInfoList == null) {
cInfoList = new ArrayList<>();
taskConnectivity.put(ac1TaskStates[i].getTaskId(), cInfoList);
@@ -358,9 +356,9 @@
int[] fanouts = new int[nProducers];
if (c.allProducersToAllConsumers()) {
- for (int i = 0; i < nProducers; ++i) {
- fanouts[i] = nConsumers;
- }
+ for (int i = 0; i < nProducers; ++i) {
+ fanouts[i] = nConsumers;
+ }
} else {
for (int i = 0; i < nProducers; ++i) {
c.indicateTargetPartitions(nProducers, nConsumers, i, targetBitmap);
@@ -402,8 +400,8 @@
throw new HyracksException("No value found for " + lv);
}
if (!(value instanceof Number)) {
- throw new HyracksException("Unexpected type of value bound to " + lv + ": " + value.getClass() + "("
- + value + ")");
+ throw new HyracksException(
+ "Unexpected type of value bound to " + lv + ": " + value.getClass() + "(" + value + ")");
}
int nParts = ((Number) value).intValue();
if (nParts <= 0) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
index 58f44ef..fa08420 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/job/JobRun.java
@@ -98,8 +98,8 @@
private Map<OperatorDescriptorId, Map<Integer, String>> operatorLocations;
- private JobRun(DeploymentId deploymentId, JobId jobId, Set<JobFlag> jobFlags,
- JobSpecification spec, ActivityClusterGraph acg) {
+ private JobRun(DeploymentId deploymentId, JobId jobId, Set<JobFlag> jobFlags, JobSpecification spec,
+ ActivityClusterGraph acg) {
this.deploymentId = deploymentId;
this.jobId = jobId;
this.jobFlags = jobFlags;
@@ -118,10 +118,9 @@
//Run a deployed job spec
public JobRun(ClusterControllerService ccs, DeploymentId deploymentId, JobId jobId, Set<JobFlag> jobFlags,
DeployedJobSpecDescriptor deployedJobSpecDescriptor, Map<byte[], byte[]> jobParameters,
- DeployedJobSpecId deployedJobSpecId)
- throws HyracksException {
- this(deploymentId, jobId, jobFlags,
- deployedJobSpecDescriptor.getJobSpecification(), deployedJobSpecDescriptor.getActivityClusterGraph());
+ DeployedJobSpecId deployedJobSpecId) throws HyracksException {
+ this(deploymentId, jobId, jobFlags, deployedJobSpecDescriptor.getJobSpecification(),
+ deployedJobSpecDescriptor.getActivityClusterGraph());
ccs.createOrGetJobParameterByteStore(jobId).setParameters(jobParameters);
Set<Constraint> constaints = deployedJobSpecDescriptor.getActivityClusterGraphConstraints();
this.scheduler = new JobExecutor(ccs, this, constaints, deployedJobSpecId);
@@ -252,7 +251,7 @@
return connectorPolicyMap;
}
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode result = om.createObjectNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java
index c5e51a6..6f5c5ad 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/partitions/PartitionMatchMaker.java
@@ -49,7 +49,8 @@
public List<Pair<PartitionDescriptor, PartitionRequest>> registerPartitionDescriptor(
PartitionDescriptor partitionDescriptor) {
- List<Pair<PartitionDescriptor, PartitionRequest>> matches = new ArrayList<Pair<PartitionDescriptor, PartitionRequest>>();
+ List<Pair<PartitionDescriptor, PartitionRequest>> matches =
+ new ArrayList<Pair<PartitionDescriptor, PartitionRequest>>();
PartitionId pid = partitionDescriptor.getPartitionId();
boolean matched = false;
List<PartitionRequest> requests = partitionRequests.get(pid);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestUtil.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestUtil.java
index 69f0571..9430c0d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/web/util/JSONOutputRequestUtil.java
@@ -23,7 +23,8 @@
public class JSONOutputRequestUtil {
- private JSONOutputRequestUtil() {}
+ private JSONOutputRequestUtil() {
+ }
public static URI uri(String host, String prefix, String path) throws URISyntaxException {
String name = host;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/AbstractTaskLifecycleWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/AbstractTaskLifecycleWork.java
index 3babf00..d0c6567 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/AbstractTaskLifecycleWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/AbstractTaskLifecycleWork.java
@@ -58,8 +58,8 @@
Map<ActivityId, ActivityCluster> activityClusterMap = run.getActivityClusterGraph().getActivityMap();
ActivityCluster ac = activityClusterMap.get(tid.getActivityId());
if (ac != null) {
- Map<ActivityId, ActivityPlan> taskStateMap = run.getActivityClusterPlanMap().get(ac.getId())
- .getActivityPlanMap();
+ Map<ActivityId, ActivityPlan> taskStateMap =
+ run.getActivityClusterPlanMap().get(ac.getId()).getActivityPlanMap();
Task[] taskStates = taskStateMap.get(tid.getActivityId()).getTasks();
if (taskStates != null && taskStates.length > tid.getPartition()) {
Task ts = taskStates[tid.getPartition()];
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/CliDeployBinaryWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/CliDeployBinaryWork.java
index c0ecffb..4962607 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/CliDeployBinaryWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/CliDeployBinaryWork.java
@@ -65,8 +65,8 @@
/**
* Deploy for the cluster controller
*/
- DeploymentUtils.deploy(deploymentId, binaryURLs, ccs.getContext()
- .getJobSerializerDeserializerContainer(), ccs.getServerContext(), false);
+ DeploymentUtils.deploy(deploymentId, binaryURLs, ccs.getContext().getJobSerializerDeserializerContainer(),
+ ccs.getServerContext(), false);
/**
* Deploy for the node controllers
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java
index b44c58c..a7c3c2f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ClusterShutdownWork.java
@@ -41,7 +41,7 @@
private final IResultCallback<Boolean> callback;
public ClusterShutdownWork(ClusterControllerService ncs, boolean terminateNCService,
- IResultCallback<Boolean> callback) {
+ IResultCallback<Boolean> callback) {
this.ccs = ncs;
this.terminateNCService = terminateNCService;
this.callback = callback;
@@ -77,8 +77,8 @@
/*
* best effort - just exit, user will have to kill misbehaving NCs
*/
- LOGGER.error("Clean shutdown of NCs timed out- giving up; unresponsive nodes: " +
- shutdownStatus.getRemainingNodes());
+ LOGGER.error("Clean shutdown of NCs timed out- giving up; unresponsive nodes: "
+ + shutdownStatus.getRemainingNodes());
}
callback.setValue(cleanShutdown);
ccs.stop(terminateNCService);
@@ -97,8 +97,8 @@
LOGGER.info("Notifying NC " + nodeId + " to shutdown...");
ncState.getNodeController().shutdown(terminateNCService);
} catch (Exception e) {
- LOGGER.log(Level.INFO,
- "Exception shutting down NC " + nodeId + " (possibly dead?), continuing shutdown...", e);
+ LOGGER.log(Level.INFO, "Exception shutting down NC " + nodeId + " (possibly dead?), continuing shutdown...",
+ e);
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/DeployJobSpecWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/DeployJobSpecWork.java
index f7335a8..c51f3c5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/DeployJobSpecWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/DeployJobSpecWork.java
@@ -59,8 +59,7 @@
acggf.createActivityClusterGraphGenerator(ccServiceCtx, EnumSet.noneOf(JobFlag.class));
ActivityClusterGraph acg = acgg.initialize();
ccs.getDeployedJobSpecStore().addDeployedJobSpecDescriptor(deployedJobSpecId, acg,
- acggf.getJobSpecification(),
- acgg.getConstraints());
+ acggf.getJobSpecification(), acgg.getConstraints());
byte[] acgBytes = JavaSerializationUtils.serialize(acg);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobInfoWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobInfoWork.java
index 8fe6470..009e445 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobInfoWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobInfoWork.java
@@ -40,8 +40,8 @@
protected void doRun() throws Exception {
try {
JobRun run = jobManager.get(jobId);
- JobInfo info = (run != null) ? new JobInfo(run.getJobId(), run.getStatus(), run.getOperatorLocations())
- : null;
+ JobInfo info =
+ (run != null) ? new JobInfo(run.getJobId(), run.getStatus(), run.getOperatorLocations()) : null;
callback.setValue(info);
} catch (Exception e) {
callback.setException(e);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobSummariesJSONWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobSummariesJSONWork.java
index 9c680c3..ccd8286 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobSummariesJSONWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetJobSummariesJSONWork.java
@@ -46,7 +46,7 @@
populateJSON(jobManager.getArchivedJobs());
}
- private void populateJSON(Collection<JobRun> jobRuns) {
+ private void populateJSON(Collection<JobRun> jobRuns) {
ObjectMapper om = new ObjectMapper();
for (JobRun run : jobRuns) {
ObjectNode jo = om.createObjectNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
index 6433223..517f56f 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetNodeDetailsJSONWork.java
@@ -44,8 +44,8 @@
import org.apache.hyracks.control.common.work.SynchronizableWork;
public class GetNodeDetailsJSONWork extends SynchronizableWork {
- private static final Section [] CC_SECTIONS = { Section.CC, Section.COMMON };
- private static final Section [] NC_SECTIONS = { Section.NC, Section.COMMON };
+ private static final Section[] CC_SECTIONS = { Section.CC, Section.COMMON };
+ private static final Section[] NC_SECTIONS = { Section.NC, Section.COMMON };
private final INodeManager nodeManager;
private final CCConfig ccConfig;
@@ -153,7 +153,6 @@
return o;
}
-
public ObjectNode getDetail() {
return detail;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
index e1b59e1..b064e52 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/GetThreadDumpWork.java
@@ -42,7 +42,6 @@
private final IResultCallback<String> callback;
private final ThreadDumpRun run;
-
public GetThreadDumpWork(ClusterControllerService ccs, String nodeId, IResultCallback<String> callback) {
this.ccs = ccs;
this.nodeId = nodeId;
@@ -83,8 +82,8 @@
Thread.sleep(sleepTime);
}
if (ccs.removeThreadDumpRun(run.getRequestId()) != null) {
- LOGGER.log(Level.WARN, "Timed out thread dump request " + run.getRequestId()
- + " for node " + nodeId);
+ LOGGER.log(Level.WARN,
+ "Timed out thread dump request " + run.getRequestId() + " for node " + nodeId);
callback.setException(new TimeoutException("Thread dump request for node " + nodeId
+ " timed out after " + TIMEOUT_SECS + " seconds."));
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java
index b3b33c9..cc37f9c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/JobletCleanupNotificationWork.java
@@ -58,8 +58,8 @@
}
Set<String> cleanupPendingNodes = run.getCleanupPendingNodeIds();
if (!cleanupPendingNodes.remove(nodeId)) {
- LOGGER.log(Level.WARN, () -> nodeId + " not in pending cleanup nodes set: " + cleanupPendingNodes +
- " for job " + jobId);
+ LOGGER.log(Level.WARN,
+ () -> nodeId + " not in pending cleanup nodes set: " + cleanupPendingNodes + " for job " + jobId);
return;
}
INodeManager nodeManager = ccs.getNodeManager();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
index 04a34af..77ecbee 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterNodeWork.java
@@ -56,9 +56,8 @@
Map<IOption, Object> ncConfiguration = new HashMap<>();
try {
LOGGER.log(Level.WARN, "Registering INodeController: id = " + id);
- NodeControllerRemoteProxy nc =
- new NodeControllerRemoteProxy(ccs.getCcId(),
- ccs.getClusterIPC().getReconnectingHandle(reg.getNodeControllerAddress()));
+ NodeControllerRemoteProxy nc = new NodeControllerRemoteProxy(ccs.getCcId(),
+ ccs.getClusterIPC().getReconnectingHandle(reg.getNodeControllerAddress()));
NodeControllerState state = new NodeControllerState(nc, reg);
INodeManager nodeManager = ccs.getNodeManager();
nodeManager.addNode(id, state);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterPartitionAvailibilityWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterPartitionAvailibilityWork.java
index edc57fb..23a81af 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterPartitionAvailibilityWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/RegisterPartitionAvailibilityWork.java
@@ -49,8 +49,8 @@
return;
}
PartitionMatchMaker pmm = run.getPartitionMatchMaker();
- List<Pair<PartitionDescriptor, PartitionRequest>> matches = pmm
- .registerPartitionDescriptor(partitionDescriptor);
+ List<Pair<PartitionDescriptor, PartitionRequest>> matches =
+ pmm.registerPartitionDescriptor(partitionDescriptor);
for (Pair<PartitionDescriptor, PartitionRequest> match : matches) {
try {
PartitionUtils.reportPartitionMatch(ccs, pid, match);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java
index aef331f..ad8882d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/ShutdownNCServiceWork.java
@@ -47,6 +47,7 @@
this.ncPort = ncPort;
this.ncId = ncId;
}
+
@Override
public final void doRun() {
LOGGER.info("Connecting to NC service '" + ncId + "' at " + ncHost + ":" + ncPort);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
index 2f80f5b..aa7a4fe 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-cc/src/main/java/org/apache/hyracks/control/cc/work/TriggerNCWork.java
@@ -54,6 +54,7 @@
this.ncPort = ncPort;
this.ncId = ncId;
}
+
@Override
public final void run() {
ccs.getExecutor().execute(() -> {
@@ -68,8 +69,8 @@
return;
// QQQ Should probably have an ACK here
} catch (IOException e) {
- LOGGER.log(Level.WARN, "Failed to contact NC service at " + ncHost + ":" + ncPort
- + "; will retry", e);
+ LOGGER.log(Level.WARN, "Failed to contact NC service at " + ncHost + ":" + ncPort + "; will retry",
+ e);
}
try {
Thread.sleep(5000);
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ConfigManagerApplicationConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ConfigManagerApplicationConfig.java
index 92e90e7..44f57fa 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ConfigManagerApplicationConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/application/ConfigManagerApplicationConfig.java
@@ -42,17 +42,17 @@
@Override
public String getString(String section, String key) {
- return (String)get(section, key);
+ return (String) get(section, key);
}
@Override
public int getInt(String section, String key) {
- return (int)get(section, key);
+ return (int) get(section, key);
}
@Override
public long getLong(String section, String key) {
- return (long)get(section, key);
+ return (long) get(section, key);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/IClusterController.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/IClusterController.java
index 2307185..9cf84dd 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/IClusterController.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/IClusterController.java
@@ -40,8 +40,7 @@
void unregisterNode(String nodeId) throws Exception;
- void notifyTaskComplete(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics)
- throws Exception;
+ void notifyTaskComplete(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics) throws Exception;
void notifyTaskFailure(JobId jobId, TaskAttemptId taskId, String nodeId, List<Exception> exceptions)
throws Exception;
@@ -66,8 +65,8 @@
void sendApplicationMessageToCC(byte[] data, DeploymentId deploymentId, String nodeId) throws Exception;
- void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult,
- boolean emptyResult, int partition, int nPartitions, NetworkAddress networkAddress) throws Exception;
+ void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult, boolean emptyResult,
+ int partition, int nPartitions, NetworkAddress networkAddress) throws Exception;
void reportResultPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition) throws Exception;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/INodeController.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/INodeController.java
index ef3b27c..9ec55f4 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/INodeController.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/base/INodeController.java
@@ -37,8 +37,8 @@
public interface INodeController {
void startTasks(DeploymentId deploymentId, JobId jobId, byte[] planBytes,
- List<TaskAttemptDescriptor> taskDescriptors, Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies,
- Set<JobFlag> flags, Map<byte[], byte[]> jobParameters, DeployedJobSpecId deployedJobSpecId)
+ List<TaskAttemptDescriptor> taskDescriptors, Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies,
+ Set<JobFlag> flags, Map<byte[], byte[]> jobParameters, DeployedJobSpecId deployedJobSpecId)
throws Exception;
void abortTasks(JobId jobId, List<TaskAttemptId> tasks) throws Exception;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
index 67ea33f..986ca96 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigManager.java
@@ -69,8 +69,8 @@
private HashSet<IOption> registeredOptions = new HashSet<>();
private HashMap<IOption, Object> definedMap = new HashMap<>();
private HashMap<IOption, Object> defaultMap = new HashMap<>();
- private CompositeMap<IOption, Object> configurationMap = new CompositeMap<>(definedMap, defaultMap,
- new NoOpMapMutator());
+ private CompositeMap<IOption, Object> configurationMap =
+ new CompositeMap<>(definedMap, defaultMap, new NoOpMapMutator());
private EnumMap<Section, Map<String, IOption>> sectionMap = new EnumMap<>(Section.class);
@SuppressWarnings("squid:S1948") // TreeMap is serializable, and therefore so is its synchronized map
private Map<String, Map<IOption, Object>> nodeSpecificMap = Collections.synchronizedMap(new TreeMap<>());
@@ -474,8 +474,7 @@
}
});
extensionOptions.forEach((extension, options) -> {
- options.forEach(option -> ini
- .add(extension, option.getKey(), option.getValue()));
+ options.forEach(option -> ini.add(extension, option.getKey(), option.getValue()));
});
return ini;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigUtils.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigUtils.java
index adf1774..4fa9b56 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/ConfigUtils.java
@@ -150,25 +150,23 @@
return value;
}
- public static String getString(Ini ini, org.apache.hyracks.api.config.Section section,
- IOption option, String defaultValue) {
+ public static String getString(Ini ini, org.apache.hyracks.api.config.Section section, IOption option,
+ String defaultValue) {
return getString(ini, section.sectionName(), option.ini(), defaultValue);
}
public static void addConfigToJSON(ObjectNode o, IApplicationConfig cfg,
- org.apache.hyracks.api.config.Section... sections) {
+ org.apache.hyracks.api.config.Section... sections) {
ArrayNode configArray = o.putArray("config");
for (org.apache.hyracks.api.config.Section section : cfg.getSections(Arrays.asList(sections)::contains)) {
ObjectNode sectionNode = configArray.addObject();
Map<String, Object> sectionConfig = getSectionOptionsForJSON(cfg, section, option -> true);
- sectionNode.put("section", section.sectionName())
- .putPOJO("properties", sectionConfig);
+ sectionNode.put("section", section.sectionName()).putPOJO("properties", sectionConfig);
}
}
public static Map<String, Object> getSectionOptionsForJSON(IApplicationConfig cfg,
- org.apache.hyracks.api.config.Section section,
- Predicate<IOption> selector) {
+ org.apache.hyracks.api.config.Section section, Predicate<IOption> selector) {
Map<String, Object> sectionConfig = new TreeMap<>();
for (IOption option : cfg.getOptions(section)) {
if (selector.test(option)) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
index 42ed1e7..3807a00 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/config/OptionTypes.java
@@ -37,8 +37,7 @@
}
long result1 = StorageUtil.getByteValue(s);
if (result1 > Integer.MAX_VALUE || result1 < Integer.MIN_VALUE) {
- throw new IllegalArgumentException(
- "The given value: " + result1 + " is not within the int range.");
+ throw new IllegalArgumentException("The given value: " + result1 + " is not within the int range.");
}
return (int) result1;
}
@@ -50,12 +49,12 @@
@Override
public String serializeToHumanReadable(Object value) {
- return value + " (" + StorageUtil.toHumanReadableSize((int)value) + ")";
+ return value + " (" + StorageUtil.toHumanReadableSize((int) value) + ")";
}
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (int)value);
+ node.put(fieldName, (int) value);
}
};
@@ -72,12 +71,12 @@
@Override
public String serializeToHumanReadable(Object value) {
- return value + " (" + StorageUtil.toHumanReadableSize((long)value) + ")";
+ return value + " (" + StorageUtil.toHumanReadableSize((long) value) + ")";
}
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (long)value);
+ node.put(fieldName, (long) value);
}
};
@@ -88,7 +87,7 @@
if (Integer.highestOneBit(value) > 16) {
throw new IllegalArgumentException("The given value " + s + " is too big for a short");
}
- return (short)value;
+ return (short) value;
}
@Override
@@ -98,7 +97,7 @@
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (short)value);
+ node.put(fieldName, (short) value);
}
};
@@ -115,7 +114,7 @@
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (int)value);
+ node.put(fieldName, (int) value);
}
};
@@ -132,7 +131,7 @@
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (double)value);
+ node.put(fieldName, (double) value);
}
};
@@ -149,7 +148,7 @@
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (String)value);
+ node.put(fieldName, (String) value);
}
};
@@ -166,7 +165,7 @@
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (long)value);
+ node.put(fieldName, (long) value);
}
};
@@ -183,7 +182,7 @@
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, (boolean)value);
+ node.put(fieldName, (boolean) value);
}
};
@@ -207,12 +206,12 @@
@Override
public String serializeToJSON(Object value) {
- return value == null ? null : ((Level)value).name();
+ return value == null ? null : ((Level) value).name();
}
@Override
public String serializeToIni(Object value) {
- return ((Level)value).name();
+ return ((Level) value).name();
}
@Override
@@ -221,25 +220,25 @@
}
};
- public static final IOptionType<String []> STRING_ARRAY = new IOptionType<String []>() {
+ public static final IOptionType<String[]> STRING_ARRAY = new IOptionType<String[]>() {
@Override
- public String [] parse(String s) {
+ public String[] parse(String s) {
return s == null ? null : s.split("\\s*,\\s*");
}
@Override
- public Class<String []> targetType() {
- return String [].class;
+ public Class<String[]> targetType() {
+ return String[].class;
}
@Override
public String serializeToIni(Object value) {
- return String.join(",", (String [])value);
+ return String.join(",", (String[]) value);
}
@Override
public void serializeJSONField(String fieldName, Object value, ObjectNode node) {
- node.put(fieldName, value == null ? null : StringUtils.join((String [])value, ','));
+ node.put(fieldName, value == null ? null : StringUtils.join((String[]) value, ','));
}
};
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java
index 85731b6..13e4504 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/CCConfig.java
@@ -60,17 +60,14 @@
RESULT_TTL(LONG, 86400000L), // TODO(mblow): add time unit
RESULT_SWEEP_THRESHOLD(LONG, 60000L), // TODO(mblow): add time unit
@SuppressWarnings("RedundantCast") // not redundant- false positive from IDEA
- ROOT_DIR(STRING, (Function<IApplicationConfig, String>) appConfig ->
- FileUtil.joinPath(appConfig.getString(ControllerConfig.Option.DEFAULT_DIR),
- "ClusterControllerService"), "<value of " + ControllerConfig.Option.DEFAULT_DIR.cmdline() +
- ">/ClusterControllerService"),
+ ROOT_DIR(STRING, (Function<IApplicationConfig, String>) appConfig -> FileUtil.joinPath(appConfig.getString(ControllerConfig.Option.DEFAULT_DIR), "ClusterControllerService"), "<value of " + ControllerConfig.Option.DEFAULT_DIR.cmdline() + ">/ClusterControllerService"),
CLUSTER_TOPOLOGY(STRING),
JOB_QUEUE_CLASS(STRING, "org.apache.hyracks.control.cc.scheduler.FIFOJobQueue"),
JOB_QUEUE_CAPACITY(INTEGER, 4096),
JOB_MANAGER_CLASS(STRING, "org.apache.hyracks.control.cc.job.JobManager"),
ENFORCE_FRAME_WRITER_PROTOCOL(BOOLEAN, false),
CORES_MULTIPLIER(INTEGER, 3),
- CONTROLLER_ID(SHORT, (short)0x0000);
+ CONTROLLER_ID(SHORT, (short) 0x0000);
private final IOptionType parser;
private Object defaultValue;
@@ -93,7 +90,7 @@
}
<T> Option(IOptionType<T> parser, Function<IApplicationConfig, T> defaultValue,
- String defaultValueDescription) {
+ String defaultValueDescription) {
this.parser = parser;
this.defaultValue = defaultValue;
this.defaultValueDescription = defaultValueDescription;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ControllerConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ControllerConfig.java
index 19c89e0..8ecd312 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ControllerConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ControllerConfig.java
@@ -33,6 +33,7 @@
public class ControllerConfig implements Serializable {
private static final long serialVersionUID = 1L;
+
public enum Option implements IOption {
CONFIG_FILE(OptionTypes.STRING, "Specify path to master configuration file", null),
CONFIG_FILE_URL(OptionTypes.URL, "Specify URL to master configuration file", null),
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java
index 95c063f..519bafc 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NCConfig.java
@@ -50,7 +50,7 @@
NCSERVICE_PORT(INTEGER, 9090),
CLUSTER_ADDRESS(STRING, (String) null),
CLUSTER_PORT(INTEGER, 1099),
- CLUSTER_CONTROLLER_ID(SHORT, (short)0x0000),
+ CLUSTER_CONTROLLER_ID(SHORT, (short) 0x0000),
CLUSTER_PUBLIC_ADDRESS(STRING, PUBLIC_ADDRESS),
CLUSTER_PUBLIC_PORT(INTEGER, CLUSTER_LISTEN_PORT),
NODE_ID(STRING, (String) null),
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NodeRegistration.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NodeRegistration.java
index 89d6e78..75ef0b7 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NodeRegistration.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/NodeRegistration.java
@@ -75,11 +75,10 @@
private final long maxJobId;
public NodeRegistration(InetSocketAddress ncAddress, String nodeId, NCConfig ncConfig, NetworkAddress dataPort,
- NetworkAddress datasetPort, String osName, String arch, String osVersion, int nProcessors,
- String vmName, String vmVersion, String vmVendor, String classpath, String libraryPath,
- String bootClasspath, List<String> inputArguments, Map<String, String> systemProperties,
- HeartbeatSchema hbSchema, NetworkAddress messagingPort, NodeCapacity capacity, int pid,
- long maxJobId) {
+ NetworkAddress datasetPort, String osName, String arch, String osVersion, int nProcessors, String vmName,
+ String vmVersion, String vmVendor, String classpath, String libraryPath, String bootClasspath,
+ List<String> inputArguments, Map<String, String> systemProperties, HeartbeatSchema hbSchema,
+ NetworkAddress messagingPort, NodeCapacity capacity, int pid, long maxJobId) {
this.ncAddress = ncAddress;
this.nodeId = nodeId;
this.ncConfig = ncConfig;
@@ -184,7 +183,9 @@
return messagingPort;
}
- public int getPid() { return pid; }
+ public int getPid() {
+ return pid;
+ }
public long getMaxJobId() {
return maxJobId;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ServiceConstants.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ServiceConstants.java
index 1b790b7..4a44356 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ServiceConstants.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/controllers/ServiceConstants.java
@@ -23,5 +23,6 @@
START_NC,
TERMINATE
}
+
public static final String NC_SERVICE_MAGIC_COOKIE = "hyncmagic2";
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/deployment/DeploymentUtils.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/deployment/DeploymentUtils.java
index bb65f7f..4d8c137 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/deployment/DeploymentUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/deployment/DeploymentUtils.java
@@ -119,8 +119,8 @@
throws HyracksException {
try {
IJobSerializerDeserializerContainer jobSerDeContainer = serviceCtx.getJobSerializerDeserializerContainer();
- IJobSerializerDeserializer jobSerDe = deploymentId == null ? null
- : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
+ IJobSerializerDeserializer jobSerDe =
+ deploymentId == null ? null : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
return jobSerDe == null ? JavaSerializationUtils.deserialize(bytes) : jobSerDe.deserialize(bytes);
} catch (Exception e) {
throw new HyracksException(e);
@@ -140,8 +140,8 @@
throws HyracksException {
try {
IJobSerializerDeserializerContainer jobSerDeContainer = serviceCtx.getJobSerializerDeserializerContainer();
- IJobSerializerDeserializer jobSerDe = deploymentId == null ? null
- : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
+ IJobSerializerDeserializer jobSerDe =
+ deploymentId == null ? null : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
return jobSerDe == null ? JavaSerializationUtils.loadClass(className) : jobSerDe.loadClass(className);
} catch (ClassNotFoundException | IOException e) {
throw new HyracksException(e);
@@ -159,8 +159,8 @@
public static ClassLoader getClassLoader(DeploymentId deploymentId, IServiceContext appCtx)
throws HyracksException {
IJobSerializerDeserializerContainer jobSerDeContainer = appCtx.getJobSerializerDeserializerContainer();
- IJobSerializerDeserializer jobSerDe = deploymentId == null ? null
- : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
+ IJobSerializerDeserializer jobSerDe =
+ deploymentId == null ? null : jobSerDeContainer.getJobSerializerDeserializer(deploymentId);
return jobSerDe == null ? DeploymentUtils.class.getClassLoader() : jobSerDe.getClassLoader();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
index e4e2dbe..0fdafe3 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/ClusterControllerRemoteProxy.java
@@ -80,16 +80,14 @@
@Override
public void notifyTaskComplete(JobId jobId, TaskAttemptId taskId, String nodeId, TaskProfile statistics)
throws Exception {
- NotifyTaskCompleteFunction fn = new NotifyTaskCompleteFunction(jobId, taskId,
- nodeId, statistics);
+ NotifyTaskCompleteFunction fn = new NotifyTaskCompleteFunction(jobId, taskId, nodeId, statistics);
ipcHandle.send(-1, fn, null);
}
@Override
public void notifyTaskFailure(JobId jobId, TaskAttemptId taskId, String nodeId, List<Exception> exceptions)
throws Exception {
- NotifyTaskFailureFunction fn = new NotifyTaskFailureFunction(jobId, taskId, nodeId,
- exceptions);
+ NotifyTaskFailureFunction fn = new NotifyTaskFailureFunction(jobId, taskId, nodeId, exceptions);
ipcHandle.send(-1, fn, null);
}
@@ -101,8 +99,7 @@
@Override
public void notifyDeployBinary(DeploymentId deploymentId, String nodeId, DeploymentStatus status) throws Exception {
- NotifyDeployBinaryFunction fn = new NotifyDeployBinaryFunction(deploymentId, nodeId,
- status);
+ NotifyDeployBinaryFunction fn = new NotifyDeployBinaryFunction(deploymentId, nodeId, status);
ipcHandle.send(-1, fn, null);
}
@@ -120,37 +117,34 @@
@Override
public void registerPartitionProvider(PartitionDescriptor partitionDescriptor) throws Exception {
- RegisterPartitionProviderFunction fn = new RegisterPartitionProviderFunction(
- partitionDescriptor);
+ RegisterPartitionProviderFunction fn = new RegisterPartitionProviderFunction(partitionDescriptor);
ipcHandle.send(-1, fn, null);
}
@Override
public void registerPartitionRequest(PartitionRequest partitionRequest) throws Exception {
- RegisterPartitionRequestFunction fn = new RegisterPartitionRequestFunction(
- partitionRequest);
+ RegisterPartitionRequestFunction fn = new RegisterPartitionRequestFunction(partitionRequest);
ipcHandle.send(-1, fn, null);
}
@Override
public void sendApplicationMessageToCC(byte[] data, DeploymentId deploymentId, String nodeId) throws Exception {
- SendApplicationMessageFunction fn = new SendApplicationMessageFunction(data,
- deploymentId, nodeId);
+ SendApplicationMessageFunction fn = new SendApplicationMessageFunction(data, deploymentId, nodeId);
ipcHandle.send(-1, fn, null);
}
@Override
public void registerResultPartitionLocation(JobId jobId, ResultSetId rsId, boolean orderedResult,
boolean emptyResult, int partition, int nPartitions, NetworkAddress networkAddress) throws Exception {
- RegisterResultPartitionLocationFunction fn = new RegisterResultPartitionLocationFunction(
- jobId, rsId, orderedResult, emptyResult, partition, nPartitions, networkAddress);
+ RegisterResultPartitionLocationFunction fn = new RegisterResultPartitionLocationFunction(jobId, rsId,
+ orderedResult, emptyResult, partition, nPartitions, networkAddress);
ipcHandle.send(-1, fn, null);
}
@Override
public void reportResultPartitionWriteCompletion(JobId jobId, ResultSetId rsId, int partition) throws Exception {
- ReportResultPartitionWriteCompletionFunction fn = new ReportResultPartitionWriteCompletionFunction(
- jobId, rsId, partition);
+ ReportResultPartitionWriteCompletionFunction fn =
+ new ReportResultPartitionWriteCompletionFunction(jobId, rsId, partition);
ipcHandle.send(-1, fn, null);
}
@@ -167,8 +161,7 @@
@Override
public void notifyStateDump(String nodeId, String stateDumpId, String state) throws Exception {
- StateDumpResponseFunction fn = new StateDumpResponseFunction(nodeId, stateDumpId,
- state);
+ StateDumpResponseFunction fn = new StateDumpResponseFunction(nodeId, stateDumpId, state);
ipcHandle.send(-1, fn, null);
}
@@ -180,8 +173,7 @@
@Override
public void notifyThreadDump(String nodeId, String requestId, String threadDumpJSON) throws Exception {
- ThreadDumpResponseFunction tdrf = new ThreadDumpResponseFunction(nodeId, requestId,
- threadDumpJSON);
+ ThreadDumpResponseFunction tdrf = new ThreadDumpResponseFunction(nodeId, requestId, threadDumpJSON);
ipcHandle.send(-1, tdrf, null);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
index a09a8bc..429cb26 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/ipc/NodeControllerRemoteProxy.java
@@ -65,8 +65,8 @@
List<TaskAttemptDescriptor> taskDescriptors, Map<ConnectorDescriptorId, IConnectorPolicy> connectorPolicies,
Set<JobFlag> flags, Map<byte[], byte[]> jobParameters, DeployedJobSpecId deployedJobSpecId)
throws Exception {
- StartTasksFunction stf = new StartTasksFunction(deploymentId, jobId, planBytes,
- taskDescriptors, connectorPolicies, flags, jobParameters, deployedJobSpecId);
+ StartTasksFunction stf = new StartTasksFunction(deploymentId, jobId, planBytes, taskDescriptors,
+ connectorPolicies, flags, jobParameters, deployedJobSpecId);
ipcHandle.send(-1, stf, null);
}
@@ -84,8 +84,7 @@
@Override
public void reportPartitionAvailability(PartitionId pid, NetworkAddress networkAddress) throws Exception {
- ReportPartitionAvailabilityFunction rpaf = new ReportPartitionAvailabilityFunction(
- pid, networkAddress);
+ ReportPartitionAvailabilityFunction rpaf = new ReportPartitionAvailabilityFunction(pid, networkAddress);
ipcHandle.send(-1, rpaf, null);
}
@@ -127,8 +126,7 @@
@Override
public void sendApplicationMessageToNC(byte[] data, DeploymentId deploymentId, String nodeId) throws Exception {
- SendApplicationMessageFunction fn = new SendApplicationMessageFunction(data,
- deploymentId, nodeId);
+ SendApplicationMessageFunction fn = new SendApplicationMessageFunction(data, deploymentId, nodeId);
ipcHandle.send(-1, fn, null);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
index 90dfc8c..bd98200 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/AbstractProfile.java
@@ -45,7 +45,7 @@
return counters;
}
- public abstract ObjectNode toJSON() ;
+ public abstract ObjectNode toJSON();
protected void populateCounters(ObjectNode jo) {
ObjectMapper om = new ObjectMapper();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
index 64d074b..c4eff85 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobProfile.java
@@ -62,7 +62,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
index 5bdb1b5..687874c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/JobletProfile.java
@@ -62,7 +62,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/TaskProfile.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/TaskProfile.java
index 3b54887..f977654 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/TaskProfile.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/job/profiling/om/TaskProfile.java
@@ -70,7 +70,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode json = om.createObjectNode();
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/shutdown/ShutdownRun.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/shutdown/ShutdownRun.java
index eae2eb6..e210963 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/shutdown/ShutdownRun.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/shutdown/ShutdownRun.java
@@ -24,7 +24,7 @@
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
-public class ShutdownRun implements IShutdownStatusConditionVariable{
+public class ShutdownRun implements IShutdownStatusConditionVariable {
private final Set<String> shutdownNodeIds = new TreeSet<>();
private boolean shutdownSuccess = false;
@@ -60,7 +60,7 @@
return shutdownSuccess;
}
- public synchronized Set<String> getRemainingNodes(){
+ public synchronized Set<String> getRemainingNodes() {
return shutdownNodeIds;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java
index dbcba99..8ed7c9e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-common/src/main/java/org/apache/hyracks/control/common/work/WorkQueue.java
@@ -116,9 +116,8 @@
break;
}
if (DEBUG) {
- LOGGER.log(Level.TRACE,
- "Dequeue (" + WorkQueue.this.hashCode() + "): " + dequeueCount.incrementAndGet() + "/"
- + enqueueCount);
+ LOGGER.log(Level.TRACE, "Dequeue (" + WorkQueue.this.hashCode() + "): "
+ + dequeueCount.incrementAndGet() + "/" + enqueueCount);
}
if (LOGGER.isEnabled(r.logLevel())) {
LOGGER.log(r.logLevel(), "Executing: " + r);
@@ -141,8 +140,7 @@
if (waitedDelta > 0 || blockedDelta > 0) {
LOGGER.warn("Work " + r + " waited " + waitedDelta + " times (~"
+ (after.getWaitedTime() - before.getWaitedTime()) + "ms), blocked " + blockedDelta
- + " times (~" + (after.getBlockedTime() - before.getBlockedTime()) + "ms)"
- );
+ + " times (~" + (after.getBlockedTime() - before.getBlockedTime()) + "ms)");
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
index 24d72f8..0e74a4c 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/NodeControllerService.java
@@ -302,8 +302,8 @@
messagingNetManager.start();
}
- final InetSocketAddress ccAddress = new InetSocketAddress(ncConfig.getClusterAddress(),
- ncConfig.getClusterPort());
+ final InetSocketAddress ccAddress =
+ new InetSocketAddress(ncConfig.getClusterAddress(), ncConfig.getClusterPort());
this.primaryCcs = addCc(ncConfig.getClusterControllerId(), ccAddress);
workQueue.start();
@@ -390,8 +390,8 @@
NetworkAddress messagingAddress =
messagingNetManager != null ? messagingNetManager.getPublicNetworkAddress() : null;
int allCores = osMXBean.getAvailableProcessors();
- nodeRegistration = new NodeRegistration(ncAddress, id, ncConfig, netAddress, datasetAddress,
- osMXBean.getName(), osMXBean.getArch(), osMXBean.getVersion(), allCores, runtimeMXBean.getVmName(),
+ nodeRegistration = new NodeRegistration(ncAddress, id, ncConfig, netAddress, datasetAddress, osMXBean.getName(),
+ osMXBean.getArch(), osMXBean.getVersion(), allCores, runtimeMXBean.getVmName(),
runtimeMXBean.getVmVersion(), runtimeMXBean.getVmVendor(), runtimeMXBean.getClassPath(),
runtimeMXBean.getLibraryPath(), runtimeMXBean.getBootClassPath(), runtimeMXBean.getInputArguments(),
runtimeMXBean.getSystemProperties(), hbSchema, messagingAddress, application.getCapacity(),
@@ -403,8 +403,8 @@
// Start heartbeat generator.
if (!heartbeatThreads.containsKey(ccs)) {
- Thread heartbeatThread = new Thread(new HeartbeatTask(ccs, nodeParameters.getHeartbeatPeriod()),
- id + "-Heartbeat");
+ Thread heartbeatThread =
+ new Thread(new HeartbeatTask(ccs, nodeParameters.getHeartbeatPeriod()), id + "-Heartbeat");
heartbeatThread.setPriority(Thread.MAX_PRIORITY);
heartbeatThread.setDaemon(true);
heartbeatThread.start();
@@ -529,7 +529,6 @@
return jpbs;
}
-
public void storeActivityClusterGraph(DeployedJobSpecId deployedJobSpecId, ActivityClusterGraph acg)
throws HyracksException {
if (deployedJobSpecActivityClusterGraphMap.get(deployedJobSpecId.getId()) != null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
index 07bb504..6e5a58e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/Task.java
@@ -460,6 +460,7 @@
public byte[] getJobParameter(byte[] name, int start, int length) throws HyracksException {
return ncs.createOrGetJobParameterByteStore(joblet.getJobId()).getParameterValue(name, start, length);
}
+
public Set<JobFlag> getJobFlags() {
return jobFlags;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java
index 24edeb2..8c4fcb0 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/DatasetPartitionReader.java
@@ -89,9 +89,8 @@
}
private long read(long offset, ByteBuffer buffer) throws HyracksDataException {
- return datasetMemoryManager != null ?
- resultState.read(datasetMemoryManager, offset, buffer) :
- resultState.read(offset, buffer);
+ return datasetMemoryManager != null ? resultState.read(datasetMemoryManager, offset, buffer)
+ : resultState.read(offset, buffer);
}
private void close() {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultState.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultState.java
index afce266..43e3409 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultState.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/dataset/ResultState.java
@@ -214,7 +214,7 @@
initReadFileHandle();
}
readSize = ioManager.syncRead(readFileHandle, offset, buffer);
- if (readSize < 0){
+ if (readSize < 0) {
throw new HyracksDataException("Premature end of file");
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/profiling/IOCounterDefault.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/profiling/IOCounterDefault.java
index 5380911..1f8669d 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/profiling/IOCounterDefault.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/io/profiling/IOCounterDefault.java
@@ -19,7 +19,7 @@
package org.apache.hyracks.control.nc.io.profiling;
-public class IOCounterDefault implements IIOCounter{
+public class IOCounterDefault implements IIOCounter {
@Override
public long getReads() {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
index 4787a50..54ac5e5 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializedPartitionWriter.java
@@ -97,8 +97,8 @@
}
if (!failed) {
manager.registerPartition(pid, ctx.getJobletContext().getJobId().getCcId(), taId,
- new MaterializedPartition(ctx, fRef, executor, ctx.getIoManager()),
- PartitionState.COMMITTED, taId.getAttempt() == 0 ? false : true);
+ new MaterializedPartition(ctx, fRef, executor, ctx.getIoManager()), PartitionState.COMMITTED,
+ taId.getAttempt() == 0 ? false : true);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
index 147606d..a782bca 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/MaterializingPipelinedPartition.java
@@ -99,8 +99,8 @@
fRefCopy = fRef;
}
writer.open();
- IFileHandle readHandle = fRefCopy == null ? null :
- ioManager.open(fRefCopy, IIOManager.FileReadWriteMode.READ_ONLY,
+ IFileHandle readHandle = fRefCopy == null ? null
+ : ioManager.open(fRefCopy, IIOManager.FileReadWriteMode.READ_ONLY,
IIOManager.FileSyncMode.METADATA_ASYNC_DATA_ASYNC);
try {
if (readHandle == null) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/PartitionManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/PartitionManager.java
index bb69eec..9ee4a9e 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/PartitionManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/partitions/PartitionManager.java
@@ -138,8 +138,7 @@
}
public void updatePartitionState(CcId ccId, PartitionId pid, TaskAttemptId taId, IPartition partition,
- PartitionState state)
- throws HyracksDataException {
+ PartitionState state) throws HyracksDataException {
PartitionDescriptor desc = new PartitionDescriptor(pid, ncs.getId(), taId, partition.isReusable());
desc.setState(state);
try {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/profiling/ConnectorReceiverProfilingFrameReader.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/profiling/ConnectorReceiverProfilingFrameReader.java
index 45e1236..0dc1fb6 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/profiling/ConnectorReceiverProfilingFrameReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/profiling/ConnectorReceiverProfilingFrameReader.java
@@ -36,8 +36,8 @@
this.reader = reader;
this.openCounter = ctx.getCounterContext().getCounter(cdId + ".receiver." + receiverIndex + ".open", true);
this.closeCounter = ctx.getCounterContext().getCounter(cdId + ".receiver." + receiverIndex + ".close", true);
- this.frameCounter = ctx.getCounterContext()
- .getCounter(cdId + ".receiver." + receiverIndex + ".nextFrame", true);
+ this.frameCounter =
+ ctx.getCounterContext().getCounter(cdId + ".receiver." + receiverIndex + ".nextFrame", true);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/resources/memory/FrameManager.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/resources/memory/FrameManager.java
index 4dc6bc9..38ae95b 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/resources/memory/FrameManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/resources/memory/FrameManager.java
@@ -51,8 +51,7 @@
}
if (bytes > FrameConstants.MAX_FRAMESIZE) {
throw new HyracksDataException(
- "Unable to allocate frame larger than:" + FrameConstants.MAX_FRAMESIZE
- + " bytes");
+ "Unable to allocate frame larger than:" + FrameConstants.MAX_FRAMESIZE + " bytes");
}
ByteBuffer buffer = ByteBuffer.allocate(bytes);
FrameHelper.serializeFrameSize(buffer, bytes / minFrameSize);
@@ -67,8 +66,8 @@
return allocateFrame(newSizeInBytes);
} else {
if (newSizeInBytes > FrameConstants.MAX_FRAMESIZE) {
- throw new HyracksDataException("Unable to allocate frame of size bigger than: "
- + FrameConstants.MAX_FRAMESIZE + " bytes");
+ throw new HyracksDataException(
+ "Unable to allocate frame of size bigger than: " + FrameConstants.MAX_FRAMESIZE + " bytes");
}
ByteBuffer buffer = allocateFrame(newSizeInBytes);
int limit = Math.min(newSizeInBytes, tobeDeallocate.capacity());
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/DeployBinaryWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/DeployBinaryWork.java
index d1385ec..dfda463 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/DeployBinaryWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/DeployBinaryWork.java
@@ -54,8 +54,8 @@
public void run() {
DeploymentStatus status;
try {
- DeploymentUtils.deploy(deploymentId, binaryURLs, ncs.getContext()
- .getJobSerializerDeserializerContainer(), ncs.getServerContext(), true);
+ DeploymentUtils.deploy(deploymentId, binaryURLs, ncs.getContext().getJobSerializerDeserializerContainer(),
+ ncs.getServerContext(), true);
status = DeploymentStatus.SUCCEED;
} catch (Exception e) {
status = DeploymentStatus.FAIL;
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java
index 3871302..cfd69ce 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-control-nc/src/main/java/org/apache/hyracks/control/nc/work/ReportPartitionAvailabilityWork.java
@@ -50,9 +50,13 @@
Map<JobId, Joblet> jobletMap = ncs.getJobletMap();
Joblet ji = jobletMap.get(pid.getJobId());
if (ji != null) {
- PartitionChannel channel = new PartitionChannel(pid, new NetworkInputChannel(ncs.getNetworkManager(),
- new InetSocketAddress(InetAddress.getByAddress(networkAddress.lookupIpAddress()),
- networkAddress.getPort()), pid, 5));
+ PartitionChannel channel =
+ new PartitionChannel(pid,
+ new NetworkInputChannel(ncs.getNetworkManager(),
+ new InetSocketAddress(
+ InetAddress.getByAddress(networkAddress.lookupIpAddress()),
+ networkAddress.getPort()),
+ pid, 5));
ji.reportPartitionAvailability(channel);
}
} catch (Exception e) {
diff --git a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCServiceConfig.java b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCServiceConfig.java
index 10fa679..32f6bf3 100644
--- a/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCServiceConfig.java
+++ b/hyracks-fullstack/hyracks/hyracks-control/hyracks-nc-service/src/main/java/org/apache/hyracks/control/nc/service/NCServiceConfig.java
@@ -35,20 +35,16 @@
* If an option is specified both in the config file and on the command line, the config file
* version will take precedence.
*/
- @Option(name = "-config-file", required = false,
- usage = "Local NC configuration file (default: none)")
+ @Option(name = "-config-file", required = false, usage = "Local NC configuration file (default: none)")
public String configFile = null;
- @Option(name = "-address", required = false,
- usage = "Address to listen on for connections from CC (default: all addresses)")
+ @Option(name = "-address", required = false, usage = "Address to listen on for connections from CC (default: all addresses)")
public String address = null;
- @Option(name = "-port", required = false,
- usage = "Port to listen on for connections from CC (default: 9090)")
+ @Option(name = "-port", required = false, usage = "Port to listen on for connections from CC (default: 9090)")
public int port = 9090;
- @Option(name = "-logdir", required = false,
- usage = "Directory to log NC output ('-' for stdout of NC service; default: $app.home/logs)")
+ @Option(name = "-logdir", required = false, usage = "Directory to log NC output ('-' for stdout of NC service; default: $app.home/logs)")
public String logdir = null;
private Ini ini = null;
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/ByteArrayPointable.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/ByteArrayPointable.java
index 2ce5291..98700a3 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/ByteArrayPointable.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/ByteArrayPointable.java
@@ -88,8 +88,8 @@
int thisArrayStart = this.getContentStartOffset();
int thatArrayStart = thatStart + getNumberBytesToStoreMeta(thatArrayLen);
- for (int thisIndex = 0, thatIndex = 0;
- thisIndex < thisArrayLen && thatIndex < thatArrayLen; ++thisIndex, ++thatIndex) {
+ for (int thisIndex = 0, thatIndex = 0; thisIndex < thisArrayLen
+ && thatIndex < thatArrayLen; ++thisIndex, ++thatIndex) {
if (this.bytes[thisArrayStart + thisIndex] != thatBytes[thatArrayStart + thatIndex]) {
return (0xff & this.bytes[thisArrayStart + thisIndex]) - (0xff & thatBytes[thatArrayStart + thatIndex]);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/UTF8StringPointable.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/UTF8StringPointable.java
index 0850b04..86a6f9c 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/UTF8StringPointable.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/primitive/UTF8StringPointable.java
@@ -512,8 +512,8 @@
boolean isLetter = Character.isLetter(originalChar);
// Make the first character into upper case while the later ones into lower case.
- char resultChar = toUpperCase && isLetter ? Character.toUpperCase(originalChar) : (isLetter ? Character
- .toLowerCase(originalChar) : originalChar);
+ char resultChar = toUpperCase && isLetter ? Character.toUpperCase(originalChar)
+ : (isLetter ? Character.toLowerCase(originalChar) : originalChar);
builder.appendChar(resultChar);
byteIndex += src.charSize(srcStart + byteIndex);
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/BinaryHashSet.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/BinaryHashSet.java
index 1996b4e..c5b8e8a 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/BinaryHashSet.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/BinaryHashSet.java
@@ -119,7 +119,6 @@
return putFindInternal(key, false, keyArray, increaseFoundCount);
}
-
// Put an entry or find an entry
private int putFindInternal(BinaryEntry key, boolean isInsert, byte[] keyArray, boolean increaseFoundCount)
throws HyracksDataException {
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/UTF8StringBuilder.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/UTF8StringBuilder.java
index fe04146..2300c06 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/UTF8StringBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/main/java/org/apache/hyracks/data/std/util/UTF8StringBuilder.java
@@ -33,7 +33,8 @@
}
}
- public void appendUtf8StringPointable(UTF8StringPointable src, int byteStartOffset, int byteLength) throws IOException {
+ public void appendUtf8StringPointable(UTF8StringPointable src, int byteStartOffset, int byteLength)
+ throws IOException {
out.write(src.getByteArray(), byteStartOffset, byteLength);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/ByteArrayPointableTest.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/ByteArrayPointableTest.java
index fbc7aea..f7146df 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/ByteArrayPointableTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/ByteArrayPointableTest.java
@@ -27,8 +27,8 @@
@Test
public void testCompareTo() throws Exception {
- ByteArrayPointable byteArrayPointable = ByteArrayPointable
- .generatePointableFromPureBytes(new byte[] { 1, 2, 3, 4 });
+ ByteArrayPointable byteArrayPointable =
+ ByteArrayPointable.generatePointableFromPureBytes(new byte[] { 1, 2, 3, 4 });
testEqual(byteArrayPointable, ByteArrayPointable.generatePointableFromPureBytes(new byte[] { 1, 2, 3, 4 }));
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/UTF8StringPointableTest.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/UTF8StringPointableTest.java
index 93b2290..302e7a0 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/UTF8StringPointableTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/primitive/UTF8StringPointableTest.java
@@ -34,8 +34,8 @@
public class UTF8StringPointableTest {
public static UTF8StringPointable STRING_EMPTY = generateUTF8Pointable(UTF8StringSample.EMPTY_STRING);
public static UTF8StringPointable STRING_UTF8_MIX = generateUTF8Pointable(UTF8StringSample.STRING_UTF8_MIX);
- public static UTF8StringPointable STRING_UTF8_MIX_LOWERCASE = generateUTF8Pointable(
- UTF8StringSample.STRING_UTF8_MIX_LOWERCASE);
+ public static UTF8StringPointable STRING_UTF8_MIX_LOWERCASE =
+ generateUTF8Pointable(UTF8StringSample.STRING_UTF8_MIX_LOWERCASE);
public static UTF8StringPointable STRING_LEN_127 = generateUTF8Pointable(UTF8StringSample.STRING_LEN_127);
public static UTF8StringPointable STRING_LEN_128 = generateUTF8Pointable(UTF8StringSample.STRING_LEN_128);
diff --git a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/util/UTF8StringCharacterIteratorTest.java b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/util/UTF8StringCharacterIteratorTest.java
index fc8a6ab..c001ab3 100644
--- a/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/util/UTF8StringCharacterIteratorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-data/hyracks-data-std/src/test/java/org/apache/hyracks/data/std/util/UTF8StringCharacterIteratorTest.java
@@ -49,7 +49,7 @@
}
@Test
- public void testIterator(){
+ public void testIterator() {
testEachIterator(UTF8StringSample.EMPTY_STRING);
testEachIterator(UTF8StringSample.STRING_UTF8_MIX);
testEachIterator(UTF8StringSample.STRING_LEN_128);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameFixedFieldAppender.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameFixedFieldAppender.java
index 8765bf1..5dc1fa3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameFixedFieldAppender.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameFixedFieldAppender.java
@@ -76,9 +76,9 @@
IntSerDeUtils.putInt(array, tupleDataEndOffset + currentField * 4, lastFieldEndOffset);
if (++currentField == fieldCount) {
tupleDataEndOffset += fieldCount * 4 + lastFieldEndOffset;
- IntSerDeUtils
- .putInt(array, FrameHelper.getTupleCountOffset(frame.getFrameSize()) - 4 * (tupleCount + 1),
- tupleDataEndOffset);
+ IntSerDeUtils.putInt(array,
+ FrameHelper.getTupleCountOffset(frame.getFrameSize()) - 4 * (tupleCount + 1),
+ tupleDataEndOffset);
++tupleCount;
IntSerDeUtils.putInt(array, FrameHelper.getTupleCountOffset(frame.getFrameSize()), tupleCount);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameTupleAppenderAccessor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameTupleAppenderAccessor.java
index b464f8e..4a324ff 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameTupleAppenderAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/comm/io/FrameTupleAppenderAccessor.java
@@ -47,9 +47,8 @@
@Override
public int getTupleStartOffset(int tupleIndex) {
- int offset = tupleIndex == 0 ?
- FrameConstants.TUPLE_START_OFFSET :
- IntSerDeUtils.getInt(getBuffer().array(), tupleCountOffset - 4 * tupleIndex);
+ int offset = tupleIndex == 0 ? FrameConstants.TUPLE_START_OFFSET
+ : IntSerDeUtils.getInt(getBuffer().array(), tupleCountOffset - 4 * tupleIndex);
return offset;
}
@@ -65,8 +64,8 @@
@Override
public int getFieldStartOffset(int tupleIndex, int fIdx) {
- return fIdx == 0 ? 0 : IntSerDeUtils.getInt(getBuffer().array(),
- getTupleStartOffset(tupleIndex) + (fIdx - 1) * 4);
+ return fIdx == 0 ? 0
+ : IntSerDeUtils.getInt(getBuffer().array(), getTupleStartOffset(tupleIndex) + (fIdx - 1) * 4);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
index ee5a041..dc66d19 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFactory.java
@@ -55,8 +55,8 @@
IBinaryHashFunction hashFn = hashFunctions[j];
int fStart = accessor.getFieldStartOffset(tIndex, fIdx);
int fEnd = accessor.getFieldEndOffset(tIndex, fIdx);
- int fh = hashFn
- .hash(accessor.getBuffer().array(), startOffset + slotLength + fStart, fEnd - fStart);
+ int fh = hashFn.hash(accessor.getBuffer().array(), startOffset + slotLength + fStart,
+ fEnd - fStart);
h = h * 31 + fh;
}
if (h < 0) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
index 458171c..4385bd5 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/FieldHashPartitionComputerFamily.java
@@ -30,7 +30,8 @@
private final int[] hashFields;
private final IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories;
- public FieldHashPartitionComputerFamily(int[] hashFields, IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories) {
+ public FieldHashPartitionComputerFamily(int[] hashFields,
+ IBinaryHashFunctionFamily[] hashFunctionGeneratorFactories) {
this.hashFields = hashFields;
this.hashFunctionGeneratorFactories = hashFunctionGeneratorFactories;
}
@@ -52,8 +53,8 @@
IBinaryHashFunction hashFn = hashFunctions[j];
int fStart = accessor.getFieldStartOffset(tIndex, fIdx);
int fEnd = accessor.getFieldEndOffset(tIndex, fIdx);
- int fh = hashFn
- .hash(accessor.getBuffer().array(), startOffset + slotLength + fStart, fEnd - fStart);
+ int fh = hashFn.hash(accessor.getBuffer().array(), startOffset + slotLength + fStart,
+ fEnd - fStart);
h += fh;
}
if (h < 0) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java
index f4da9bf..d58a248 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/data/partition/range/FieldRangePartitionComputerFactory.java
@@ -86,8 +86,8 @@
int fIdx = rangeFields[f];
int fStart = accessor.getFieldStartOffset(tIndex, fIdx);
int fEnd = accessor.getFieldEndOffset(tIndex, fIdx);
- c = comparators[f].compare(accessor.getBuffer().array(), startOffset + slotLength + fStart, fEnd
- - fStart, rangeMap.getByteArray(fieldIndex, f), rangeMap.getStartOffset(fieldIndex, f),
+ c = comparators[f].compare(accessor.getBuffer().array(), startOffset + slotLength + fStart,
+ fEnd - fStart, rangeMap.getByteArray(fieldIndex, f), rangeMap.getStartOffset(fieldIndex, f),
rangeMap.getLength(fieldIndex, f));
if (c != 0) {
return c;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java
index 81f06da..e99a2ff 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/SerdeUtils.java
@@ -112,7 +112,8 @@
return f.createBinaryComparator();
}
- public static IBinaryComparatorFactory[] serdesToComparatorFactories(ISerializerDeserializer[] serdes, int numSerdes) {
+ public static IBinaryComparatorFactory[] serdesToComparatorFactories(ISerializerDeserializer[] serdes,
+ int numSerdes) {
IBinaryComparatorFactory[] comparatorsFactories = new IBinaryComparatorFactory[numSerdes];
for (int i = 0; i < numSerdes; i++) {
comparatorsFactories[i] = serdeToComparatorFactory(serdes[i]);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java
index 52bf893..08ed922 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/main/java/org/apache/hyracks/dataflow/common/utils/TupleUtils.java
@@ -67,7 +67,7 @@
}
public static void createIntegerTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple, boolean filtered,
- final int... fields) throws HyracksDataException {
+ final int... fields) throws HyracksDataException {
DataOutput dos = tupleBuilder.getDataOutput();
tupleBuilder.reset();
for (final int i : fields) {
@@ -88,8 +88,8 @@
public static ITupleReference createIntegerTuple(boolean filtered, final int... fields)
throws HyracksDataException {
- ArrayTupleBuilder tupleBuilder = filtered ? new ArrayTupleBuilder(fields.length + 1)
- : new ArrayTupleBuilder(fields.length);
+ ArrayTupleBuilder tupleBuilder =
+ filtered ? new ArrayTupleBuilder(fields.length + 1) : new ArrayTupleBuilder(fields.length);
ArrayTupleReference tuple = new ArrayTupleReference();
createIntegerTuple(tupleBuilder, tuple, fields);
return tuple;
@@ -122,8 +122,8 @@
StringBuilder strBuilder = new StringBuilder();
int numPrintFields = Math.min(tuple.getFieldCount(), fields.length);
for (int i = 0; i < numPrintFields; i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
DataInput dataIn = new DataInputStream(inStream);
Object o = fields[i].deserialize(dataIn);
strBuilder.append(o.toString());
@@ -139,8 +139,8 @@
int numFields = Math.min(tuple.getFieldCount(), fields.length);
Object[] objs = new Object[numFields];
for (int i = 0; i < numFields; i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
DataInput dataIn = new DataInputStream(inStream);
objs[i] = fields[i].deserialize(dataIn);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java
index 766c77a..c39648c 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-common/src/test/java/org/apache/hyracks/dataflow/common/comm/io/largeobject/FrameFixedFieldTupleAppenderTest.java
@@ -155,8 +155,8 @@
private IFrameTupleAccessor prepareData(DATA_TYPE type) throws HyracksDataException {
IFrameTupleAccessor accessor = new FrameTupleAccessor(recordDescriptor);
- IFrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(new FrameManager(INPUT_BUFFER_SIZE)),
- true);
+ IFrameTupleAppender appender =
+ new FrameTupleAppender(new VSizeFrame(new FrameManager(INPUT_BUFFER_SIZE)), true);
int i = 0;
do {
switch (type) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
index e338961..c6512929 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractOperatorDescriptor.java
@@ -92,7 +92,7 @@
}
@Override
- public ObjectNode toJSON() {
+ public ObjectNode toJSON() {
ObjectMapper om = new ObjectMapper();
ObjectNode jop = om.createObjectNode();
jop.put("id", String.valueOf(getOperatorId()));
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
index 93a8120..6ae5b28 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/base/AbstractUnaryInputOperatorNodePushable.java
@@ -22,8 +22,8 @@
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.exceptions.HyracksDataException;
-public abstract class AbstractUnaryInputOperatorNodePushable extends AbstractOperatorNodePushable implements
- IFrameWriter {
+public abstract class AbstractUnaryInputOperatorNodePushable extends AbstractOperatorNodePushable
+ implements IFrameWriter {
protected IFrameWriter writer;
protected RecordDescriptor recordDesc;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java
index 4499e32c..47b11ce 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/DeallocatableFramePool.java
@@ -64,7 +64,7 @@
private ByteBuffer mergeExistingFrames(int frameSize) throws HyracksDataException {
int mergedSize = memBudget - allocated;
- for (Iterator<ByteBuffer> iter = buffers.iterator(); iter.hasNext(); ) {
+ for (Iterator<ByteBuffer> iter = buffers.iterator(); iter.hasNext();) {
ByteBuffer buffer = iter.next();
iter.remove();
mergedSize += buffer.capacity();
@@ -88,7 +88,7 @@
}
private ByteBuffer findExistingFrame(int frameSize) {
- for (Iterator<ByteBuffer> iter = buffers.iterator(); iter.hasNext(); ) {
+ for (Iterator<ByteBuffer> iter = buffers.iterator(); iter.hasNext();) {
ByteBuffer next = iter.next();
if (next.capacity() >= frameSize) {
iter.remove();
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java
index 700500b..6a60813 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/buffermanager/FrameBufferManager.java
@@ -59,4 +59,3 @@
}
}
-
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
index 4359b54..27e2671 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/collectors/SortMergeFrameReader.java
@@ -65,8 +65,8 @@
}
List<IFrameReader> batch = new ArrayList<IFrameReader>(nSenders);
pbm.getNextBatch(batch, nSenders);
- merger = new RunMergingFrameReader(ctx, batch, inFrames, sortFields,
- comparators, nmkComputer, recordDescriptor);
+ merger = new RunMergingFrameReader(ctx, batch, inFrames, sortFields, comparators, nmkComputer,
+ recordDescriptor);
} else {
// multi level merge.
throw new HyracksDataException("Not yet supported");
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
index b1cd83e..920fdb8 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/LocalityAwareMToNPartitioningConnectorDescriptor.java
@@ -82,8 +82,8 @@
expectedPartitions.set(i);
}
}
- NonDeterministicChannelReader channelReader = new NonDeterministicChannelReader(nProducerPartitions,
- expectedPartitions);
+ NonDeterministicChannelReader channelReader =
+ new NonDeterministicChannelReader(nProducerPartitions, expectedPartitions);
NonDeterministicFrameReader frameReader = new NonDeterministicFrameReader(channelReader);
return new PartitionCollector(ctx, getConnectorId(), receiverIndex, expectedPartitions, frameReader,
channelReader);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
index d26b9ef..02fbedb 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningConnectorDescriptor.java
@@ -54,8 +54,8 @@
int nProducerPartitions, int nConsumerPartitions) throws HyracksDataException {
BitSet expectedPartitions = new BitSet(nProducerPartitions);
expectedPartitions.set(0, nProducerPartitions);
- NonDeterministicChannelReader channelReader = new NonDeterministicChannelReader(nProducerPartitions,
- expectedPartitions);
+ NonDeterministicChannelReader channelReader =
+ new NonDeterministicChannelReader(nProducerPartitions, expectedPartitions);
NonDeterministicFrameReader frameReader = new NonDeterministicFrameReader(channelReader);
return new PartitionCollector(ctx, getConnectorId(), index, expectedPartitions, frameReader, channelReader);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
index edcad42..026ca5e 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
@@ -69,8 +69,8 @@
public IFrameWriter createPartitioner(IHyracksTaskContext ctx, RecordDescriptor recordDesc,
IPartitionWriterFactory edwFactory, int index, int nProducerPartitions, int nConsumerPartitions)
throws HyracksDataException {
- final PartitionDataWriter hashWriter = new PartitionDataWriter(ctx, nConsumerPartitions, edwFactory, recordDesc,
- tpcf.createPartitioner());
+ final PartitionDataWriter hashWriter =
+ new PartitionDataWriter(ctx, nConsumerPartitions, edwFactory, recordDesc, tpcf.createPartitioner());
return hashWriter;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
index eda353b..78428a3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/OneToOneConnectorDescriptor.java
@@ -57,8 +57,8 @@
int nProducerPartitions, int nConsumerPartitions) throws HyracksDataException {
BitSet expectedPartitions = new BitSet(nProducerPartitions);
expectedPartitions.set(index);
- NonDeterministicChannelReader channelReader = new NonDeterministicChannelReader(nProducerPartitions,
- expectedPartitions);
+ NonDeterministicChannelReader channelReader =
+ new NonDeterministicChannelReader(nProducerPartitions, expectedPartitions);
NonDeterministicFrameReader frameReader = new NonDeterministicFrameReader(channelReader);
return new PartitionCollector(ctx, getConnectorId(), index, expectedPartitions, frameReader, channelReader);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java
index 97d5f2b..e1df709 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/connectors/PartitionWithMessageDataWriter.java
@@ -30,7 +30,7 @@
public PartitionWithMessageDataWriter(IHyracksTaskContext ctx, int consumerPartitionCount,
IPartitionWriterFactory pwFactory, RecordDescriptor recordDescriptor, ITuplePartitionComputer tpc)
- throws HyracksDataException {
+ throws HyracksDataException {
super(ctx, consumerPartitionCount, pwFactory, recordDescriptor, tpc);
// since the message partition writer sends broadcast messages, we allocate frames when we create the writer
for (int i = 0; i < consumerPartitionCount; ++i) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
index 543ad40..18dc9c9 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/AbstractFileWriteOperatorDescriptor.java
@@ -95,7 +95,7 @@
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
- return new DeserializedOperatorNodePushable(ctx, new FileWriteOperator(ctx.getIoManager(),
- partition), recordDescProvider.getInputRecordDescriptor(getActivityId(), 0));
+ return new DeserializedOperatorNodePushable(ctx, new FileWriteOperator(ctx.getIoManager(), partition),
+ recordDescProvider.getInputRecordDescriptor(getActivityId(), 0));
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
index d121ec4..175bdae 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/file/DelimitedDataTupleParserFactory.java
@@ -67,8 +67,8 @@
ArrayTupleBuilder tb = new ArrayTupleBuilder(valueParsers.length);
DataOutput dos = tb.getDataOutput();
- FieldCursorForDelimitedDataParser cursor = new FieldCursorForDelimitedDataParser(
- new InputStreamReader(in), fieldDelimiter, quote);
+ FieldCursorForDelimitedDataParser cursor =
+ new FieldCursorForDelimitedDataParser(new InputStreamReader(in), fieldDelimiter, quote);
while (cursor.nextRecord()) {
tb.reset();
for (int i = 0; i < valueParsers.length; ++i) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
index 40f02f9..3232527 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/HashSpillableTableFactory.java
@@ -84,17 +84,18 @@
intermediateResultKeys[i] = i;
}
- final FrameTuplePairComparator ftpcInputCompareToAggregate = new FrameTuplePairComparator(keyFields,
- intermediateResultKeys, comparators);
+ final FrameTuplePairComparator ftpcInputCompareToAggregate =
+ new FrameTuplePairComparator(keyFields, intermediateResultKeys, comparators);
- final ITuplePartitionComputer tpc = new FieldHashPartitionComputerFamily(keyFields, hashFunctionFamilies)
- .createPartitioner(seed);
+ final ITuplePartitionComputer tpc =
+ new FieldHashPartitionComputerFamily(keyFields, hashFunctionFamilies).createPartitioner(seed);
// For calculating hash value for the already aggregated tuples (not incoming tuples)
// This computer is required to calculate the hash value of a aggregated tuple
// while doing the garbage collection work on Hash Table.
- final ITuplePartitionComputer tpcIntermediate = new FieldHashPartitionComputerFamily(intermediateResultKeys,
- hashFunctionFamilies).createPartitioner(seed);
+ final ITuplePartitionComputer tpcIntermediate =
+ new FieldHashPartitionComputerFamily(intermediateResultKeys, hashFunctionFamilies)
+ .createPartitioner(seed);
final IAggregatorDescriptor aggregator = aggregateFactory.createAggregator(ctx, inRecordDescriptor,
outRecordDescriptor, keyFields, intermediateResultKeys, null, -1);
@@ -110,9 +111,8 @@
final int numPartitions = getNumOfPartitions(inputDataBytesSize / ctx.getInitialFrameSize(), memoryBudget);
final int entriesPerPartition = (int) Math.ceil(1.0 * tableSize / numPartitions);
if (LOGGER.isDebugEnabled()) {
- LOGGER.debug(
- "created hashtable, table size:" + tableSize + " file size:" + inputDataBytesSize + " #partitions:"
- + numPartitions);
+ LOGGER.debug("created hashtable, table size:" + tableSize + " file size:" + inputDataBytesSize
+ + " #partitions:" + numPartitions);
}
final ArrayTupleBuilder outputTupleBuilder = new ArrayTupleBuilder(outRecordDescriptor.getFields().length);
@@ -122,14 +122,14 @@
private final TuplePointer pointer = new TuplePointer();
private final BitSet spilledSet = new BitSet(numPartitions);
// This frame pool will be shared by both data table and hash table.
- private final IDeallocatableFramePool framePool = new DeallocatableFramePool(ctx,
- framesLimit * ctx.getInitialFrameSize());
+ private final IDeallocatableFramePool framePool =
+ new DeallocatableFramePool(ctx, framesLimit * ctx.getInitialFrameSize());
// buffer manager for hash table
- private final ISimpleFrameBufferManager bufferManagerForHashTable = new FramePoolBackedFrameBufferManager(
- framePool);
+ private final ISimpleFrameBufferManager bufferManagerForHashTable =
+ new FramePoolBackedFrameBufferManager(framePool);
- private final ISerializableTable hashTableForTuplePointer = new SerializableHashTable(tableSize, ctx,
- bufferManagerForHashTable);
+ private final ISerializableTable hashTableForTuplePointer =
+ new SerializableHashTable(tableSize, ctx, bufferManagerForHashTable);
// buffer manager for data table
final IPartitionedTupleBufferManager bufferManager = new VPartitionTupleBufferManager(
@@ -138,8 +138,8 @@
final ITuplePointerAccessor bufferAccessor = bufferManager.getTuplePointerAccessor(outRecordDescriptor);
- private final PreferToSpillFullyOccupiedFramePolicy spillPolicy = new PreferToSpillFullyOccupiedFramePolicy(
- bufferManager, spilledSet);
+ private final PreferToSpillFullyOccupiedFramePolicy spillPolicy =
+ new PreferToSpillFullyOccupiedFramePolicy(bufferManager, spilledSet);
private final FrameTupleAppender outputAppender = new FrameTupleAppender(new VSizeFrame(ctx));
@@ -157,8 +157,8 @@
// Checks whether the garbage collection is required and conducts a garbage collection if so.
if (hashTableForTuplePointer.isGarbageCollectionNeeded()) {
- int numberOfFramesReclaimed = hashTableForTuplePointer.collectGarbage(bufferAccessor,
- tpcIntermediate);
+ int numberOfFramesReclaimed =
+ hashTableForTuplePointer.collectGarbage(bufferAccessor, tpcIntermediate);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Garbage Collection on Hash table is done. Deallocated frames:"
+ numberOfFramesReclaimed);
@@ -310,8 +310,8 @@
// partition again and again.
return 2;
}
- long numberOfPartitions = (long) (Math
- .ceil((nubmerOfInputFrames * FUDGE_FACTOR - frameLimit) / (frameLimit - 1)));
+ long numberOfPartitions =
+ (long) (Math.ceil((nubmerOfInputFrames * FUDGE_FACTOR - frameLimit) / (frameLimit - 1)));
numberOfPartitions = Math.max(2, numberOfPartitions);
if (numberOfPartitions > frameLimit) {
numberOfPartitions = (long) Math.ceil(Math.sqrt(nubmerOfInputFrames * FUDGE_FACTOR));
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
index 7acd687..cb32c4a 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldGroupAggregatorFactory.java
@@ -113,7 +113,8 @@
int count = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
count += 1;
if (!useObjectState) {
try {
@@ -139,7 +140,8 @@
int sum = 0, count = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
count += 1;
if (!useObjectState) {
ByteBuffer buf = ByteBuffer.wrap(data);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
index b77c91c..290cc58 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/AvgFieldMergeAggregatorFactory.java
@@ -118,7 +118,8 @@
int sum = 0, count = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
count += 1;
if (!useObjectState) {
ByteBuffer buf = ByteBuffer.wrap(data);
@@ -156,8 +157,10 @@
int count = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
- count += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart + 4);
+ sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ count += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart + 4);
if (!useObjectState) {
try {
fieldOutput.writeInt(sum);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
index 771303f..fc8d956 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/FloatSumFieldAggregatorFactory.java
@@ -35,8 +35,7 @@
/**
*
*/
-public class FloatSumFieldAggregatorFactory implements
- IFieldAggregateDescriptorFactory {
+public class FloatSumFieldAggregatorFactory implements IFieldAggregateDescriptorFactory {
private static final long serialVersionUID = 1L;
@@ -44,7 +43,7 @@
private final boolean useObjectState;
- public FloatSumFieldAggregatorFactory(int aggField, boolean useObjState){
+ public FloatSumFieldAggregatorFactory(int aggField, boolean useObjState) {
this.aggField = aggField;
this.useObjectState = useObjState;
}
@@ -53,8 +52,7 @@
* @see org.apache.hyracks.dataflow.std.group.IFieldAggregateDescriptorFactory#createAggregator(org.apache.hyracks.api.context.IHyracksTaskContext, org.apache.hyracks.api.dataflow.value.RecordDescriptor, org.apache.hyracks.api.dataflow.value.RecordDescriptor)
*/
@Override
- public IFieldAggregateDescriptor createAggregator(IHyracksTaskContext ctx,
- RecordDescriptor inRecordDescriptor,
+ public IFieldAggregateDescriptor createAggregator(IHyracksTaskContext ctx, RecordDescriptor inRecordDescriptor,
RecordDescriptor outRecordDescriptor) throws HyracksDataException {
return new IFieldAggregateDescriptor() {
@@ -64,8 +62,8 @@
}
@Override
- public void outputPartialResult(DataOutput fieldOutput, byte[] data,
- int offset, AggregateState state) throws HyracksDataException {
+ public void outputPartialResult(DataOutput fieldOutput, byte[] data, int offset, AggregateState state)
+ throws HyracksDataException {
float sum;
if (!useObjectState) {
sum = FloatPointable.getFloat(data, offset);
@@ -80,8 +78,8 @@
}
@Override
- public void outputFinalResult(DataOutput fieldOutput, byte[] data,
- int offset, AggregateState state) throws HyracksDataException {
+ public void outputFinalResult(DataOutput fieldOutput, byte[] data, int offset, AggregateState state)
+ throws HyracksDataException {
float sum;
if (!useObjectState) {
sum = FloatPointable.getFloat(data, offset);
@@ -106,14 +104,14 @@
}
@Override
- public void init(IFrameTupleAccessor accessor, int tIndex,
- DataOutput fieldOutput, AggregateState state)
+ public void init(IFrameTupleAccessor accessor, int tIndex, DataOutput fieldOutput, AggregateState state)
throws HyracksDataException {
float sum = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += FloatPointable.getFloat(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += FloatPointable.getFloat(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
if (!useObjectState) {
try {
@@ -138,13 +136,13 @@
}
@Override
- public void aggregate(IFrameTupleAccessor accessor, int tIndex,
- byte[] data, int offset, AggregateState state)
- throws HyracksDataException {
+ public void aggregate(IFrameTupleAccessor accessor, int tIndex, byte[] data, int offset,
+ AggregateState state) throws HyracksDataException {
float sum = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += FloatPointable.getFloat(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += FloatPointable.getFloat(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
if (!useObjectState) {
ByteBuffer buf = ByteBuffer.wrap(data);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
index 511b651..90e1474 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/IntSumFieldAggregatorFactory.java
@@ -107,7 +107,8 @@
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
if (!useObjectState) {
try {
@@ -143,7 +144,8 @@
int sum = 0;
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
- sum += IntegerPointable.getInteger(accessor.getBuffer().array(), tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
+ sum += IntegerPointable.getInteger(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart);
if (!useObjectState) {
ByteBuffer buf = ByteBuffer.wrap(data);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
index 6900918..d43d4fd 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MinMaxStringFieldAggregatorFactory.java
@@ -113,9 +113,9 @@
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
int fieldLength = accessor.getFieldLength(tIndex, aggField);
- String strField = utf8SerializerDeserializer.deserialize(new DataInputStream(
- new ByteArrayInputStream(accessor.getBuffer().array(), tupleOffset
- + accessor.getFieldSlotsLength() + fieldStart, fieldLength)));
+ String strField = utf8SerializerDeserializer
+ .deserialize(new DataInputStream(new ByteArrayInputStream(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart, fieldLength)));
if (hasBinaryState) {
// Object-binary-state
Object[] storedState;
@@ -158,9 +158,9 @@
int tupleOffset = accessor.getTupleStartOffset(tIndex);
int fieldStart = accessor.getFieldStartOffset(tIndex, aggField);
int fieldLength = accessor.getFieldLength(tIndex, aggField);
- String strField = utf8SerializerDeserializer.deserialize(new DataInputStream(
- new ByteArrayInputStream(accessor.getBuffer().array(), tupleOffset
- + accessor.getFieldSlotsLength() + fieldStart, fieldLength)));
+ String strField = utf8SerializerDeserializer
+ .deserialize(new DataInputStream(new ByteArrayInputStream(accessor.getBuffer().array(),
+ tupleOffset + accessor.getFieldSlotsLength() + fieldStart, fieldLength)));
if (hasBinaryState) {
int stateIdx = IntegerPointable.getInteger(data, offset);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
index 595e2c4..d2aa35f 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/aggregators/MultiFieldsAggregatorFactory.java
@@ -95,8 +95,8 @@
}
@Override
- public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor, int tIndex,
- AggregateState state) throws HyracksDataException {
+ public boolean outputFinalResult(ArrayTupleBuilder tupleBuilder, IFrameTupleAccessor stateAccessor,
+ int tIndex, AggregateState state) throws HyracksDataException {
DataOutput dos = tupleBuilder.getDataOutput();
int tupleOffset = stateAccessor.getTupleStartOffset(tIndex);
@@ -151,8 +151,8 @@
int fieldIndex = 0;
for (int i = 0; i < aggregators.length; i++) {
if (aggregators[i].needsBinaryState()) {
- int stateFieldOffset = stateAccessor.getFieldStartOffset(stateTupleIndex, keys.length
- + fieldIndex);
+ int stateFieldOffset =
+ stateAccessor.getFieldStartOffset(stateTupleIndex, keys.length + fieldIndex);
aggregators[i].aggregate(accessor, tIndex, stateAccessor.getBuffer().array(),
stateTupleOffset + stateAccessor.getFieldSlotsLength() + stateFieldOffset,
((AggregateState[]) state.state)[i]);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
index 7e6e147..43f57af 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupBuildOperatorNodePushable.java
@@ -71,8 +71,8 @@
for (int i = 0; i < comparatorFactories.length; ++i) {
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
- this.firstNormalizerComputer = firstNormalizerFactory == null ? null
- : firstNormalizerFactory.createNormalizedKeyComputer();
+ this.firstNormalizerComputer =
+ firstNormalizerFactory == null ? null : firstNormalizerFactory.createNormalizedKeyComputer();
this.spillableTableFactory = spillableTableFactory;
this.inRecordDescriptor = inRecordDescriptor;
this.outRecordDescriptor = outRecordDescriptor;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
index 23dee02..6dea186 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupOperatorDescriptor.java
@@ -124,7 +124,7 @@
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
final IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new ExternalGroupBuildOperatorNodePushable(ctx, new TaskId(getActivityId(), partition), tableSize,
fileSize, keyFields, framesLimit, comparatorFactories, firstNormalizerFactory,
partialAggregatorFactory, recordDescProvider.getInputRecordDescriptor(getActivityId(), 0),
@@ -142,7 +142,7 @@
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, final int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new ExternalGroupWriteOperatorNodePushable(ctx,
new TaskId(new ActivityId(getOperatorId(), AGGREGATE_ACTIVITY_ID), partition),
spillableTableFactory, partialRecDesc, outRecDesc, framesLimit, keyFields, firstNormalizerFactory,
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
index fb88775..95994f3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalGroupWriteOperatorNodePushable.java
@@ -130,8 +130,8 @@
runs[i].getFileSize(), mergeGroupFields, groupByComparators, nmkComputer,
mergeAggregatorFactory, partialAggRecordDesc, outRecordDesc, frameLimit, level);
RunFileWriter[] runFileWriters = new RunFileWriter[partitionTable.getNumPartitions()];
- int[] sizeInTuplesNextLevel = buildGroup(runs[i].createDeleteOnCloseReader(), partitionTable,
- runFileWriters);
+ int[] sizeInTuplesNextLevel =
+ buildGroup(runs[i].createDeleteOnCloseReader(), partitionTable, runFileWriters);
for (int idFile = 0; idFile < runFileWriters.length; idFile++) {
if (runFileWriters[idFile] != null) {
generatedRuns.add(runFileWriters[idFile]);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java
index d29e9ab..8e7777f 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/external/ExternalHashGroupBy.java
@@ -63,8 +63,7 @@
}
}
- private void flushPartitionToRun(int partition, RunFileWriter writer)
- throws HyracksDataException {
+ private void flushPartitionToRun(int partition, RunFileWriter writer) throws HyracksDataException {
try {
spilledNumTuples[partition] += table.flushFrames(partition, writer, AggregateType.PARTIAL);
table.clear(partition);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
index db6102e..ca78046 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/group/preclustered/PreclusteredGroupWriter.java
@@ -82,9 +82,8 @@
// Deducts input/output frames.
this.memoryLimit = framesLimit <= 0 ? -1 : ((long) (framesLimit - 2)) * ctx.getInitialFrameSize();
- this.aggregator =
- aggregatorFactory.createAggregator(ctx, inRecordDesc, outRecordDesc, groupFields, groupFields, writer,
- this.memoryLimit);
+ this.aggregator = aggregatorFactory.createAggregator(ctx, inRecordDesc, outRecordDesc, groupFields, groupFields,
+ writer, this.memoryLimit);
this.aggregateState = aggregator.createAggregateStates();
copyFrame = new VSizeFrame(ctx);
inFrameAccessor = new FrameTupleAccessor(inRecordDesc);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java
index 10cc954..ec652eb 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/intersect/IntersectOperatorDescriptor.java
@@ -197,11 +197,10 @@
this.allProjectFields = projectedFields;
this.firstKeyNormalizerComputer =
firstKeyNormalizerFactory != null ? firstKeyNormalizerFactory.createNormalizedKeyComputer() : null;
- this.normalizedKeyDecisive =
- firstKeyNormalizerFactory != null
- ? firstKeyNormalizerFactory.getNormalizedKeyProperties().isDecisive()
- && compareFields[0].length == 1
- : false;
+ this.normalizedKeyDecisive = firstKeyNormalizerFactory != null
+ ? firstKeyNormalizerFactory.getNormalizedKeyProperties().isDecisive()
+ && compareFields[0].length == 1
+ : false;
comparators = new IBinaryComparator[compareFields[0].length];
for (int i = 0; i < comparators.length; i++) {
comparators[i] = comparatorFactory[i].createBinaryComparator();
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
index 665bb2b..bee0590 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/HybridHashJoinOperatorDescriptor.java
@@ -176,22 +176,22 @@
for (int i = 0; i < comparatorFactories.length; ++i) {
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
- final IMissingWriter[] nullWriters1 = isLeftOuter ? new IMissingWriter[nonMatchWriterFactories1.length]
- : null;
+ final IMissingWriter[] nullWriters1 =
+ isLeftOuter ? new IMissingWriter[nonMatchWriterFactories1.length] : null;
if (isLeftOuter) {
for (int i = 0; i < nonMatchWriterFactories1.length; i++) {
nullWriters1[i] = nonMatchWriterFactories1[i].createMissingWriter();
}
}
- final IPredicateEvaluator predEvaluator = (predEvaluatorFactory == null ? null
- : predEvaluatorFactory.createPredicateEvaluator());
+ final IPredicateEvaluator predEvaluator =
+ (predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator());
IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
private BuildAndPartitionTaskState state = new BuildAndPartitionTaskState(
ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition));
private final FrameTupleAccessor accessorBuild = new FrameTupleAccessor(rd1);
- private final ITuplePartitionComputer hpcBuild = new FieldHashPartitionComputerFactory(keys1,
- hashFunctionFactories).createPartitioner();
+ private final ITuplePartitionComputer hpcBuild =
+ new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories).createPartitioner();
private final FrameTupleAppender appender = new FrameTupleAppender();
private final FrameTupleAppender ftappender = new FrameTupleAppender();
private IFrame[] bufferForPartitions;
@@ -285,8 +285,8 @@
if (memsize > inputsize0) {
state.nPartitions = 0;
} else {
- state.nPartitions = (int) (Math
- .ceil((inputsize0 * factor / nPartitions - memsize) / (memsize - 1)));
+ state.nPartitions =
+ (int) (Math.ceil((inputsize0 * factor / nPartitions - memsize) / (memsize - 1)));
}
if (state.nPartitions <= 0) {
// becomes in-memory HJ
@@ -303,10 +303,10 @@
throw new HyracksDataException("not enough memory");
}
- ITuplePartitionComputer hpc0 = new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories)
- .createPartitioner();
- ITuplePartitionComputer hpc1 = new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories)
- .createPartitioner();
+ ITuplePartitionComputer hpc0 =
+ new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories).createPartitioner();
+ ITuplePartitionComputer hpc1 =
+ new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories).createPartitioner();
int tableSize = (int) (state.memoryForHashtable * recordsPerFrame * factor);
ISerializableTable table = new SimpleSerializableHashTable(tableSize, ctx);
state.joiner =
@@ -369,23 +369,23 @@
for (int i = 0; i < comparatorFactories.length; ++i) {
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
- final IMissingWriter[] nullWriters1 = isLeftOuter ? new IMissingWriter[nonMatchWriterFactories1.length]
- : null;
+ final IMissingWriter[] nullWriters1 =
+ isLeftOuter ? new IMissingWriter[nonMatchWriterFactories1.length] : null;
if (isLeftOuter) {
for (int i = 0; i < nonMatchWriterFactories1.length; i++) {
nullWriters1[i] = nonMatchWriterFactories1[i].createMissingWriter();
}
}
- final IPredicateEvaluator predEvaluator = (predEvaluatorFactory == null ? null
- : predEvaluatorFactory.createPredicateEvaluator());
+ final IPredicateEvaluator predEvaluator =
+ (predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator());
IOperatorNodePushable op = new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
private BuildAndPartitionTaskState state;
private final FrameTupleAccessor accessorProbe = new FrameTupleAccessor(rd0);
- private final ITuplePartitionComputerFactory hpcf0 = new FieldHashPartitionComputerFactory(keys0,
- hashFunctionFactories);
- private final ITuplePartitionComputerFactory hpcf1 = new FieldHashPartitionComputerFactory(keys1,
- hashFunctionFactories);
+ private final ITuplePartitionComputerFactory hpcf0 =
+ new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories);
+ private final ITuplePartitionComputerFactory hpcf1 =
+ new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories);
private final ITuplePartitionComputer hpcProbe = hpcf0.createPartitioner();
private final FrameTupleAppender appender = new FrameTupleAppender();
@@ -476,10 +476,10 @@
} finally {
state.joiner.releaseMemory();
}
- ITuplePartitionComputer hpcRep0 = new RepartitionComputerFactory(state.nPartitions, hpcf0)
- .createPartitioner();
- ITuplePartitionComputer hpcRep1 = new RepartitionComputerFactory(state.nPartitions, hpcf1)
- .createPartitioner();
+ ITuplePartitionComputer hpcRep0 =
+ new RepartitionComputerFactory(state.nPartitions, hpcf0).createPartitioner();
+ ITuplePartitionComputer hpcRep1 =
+ new RepartitionComputerFactory(state.nPartitions, hpcf1).createPartitioner();
if (state.memoryForHashtable != memsize - 2) {
for (int i = 0; i < state.nPartitions; i++) {
ByteBuffer buf = bufferForPartitions[i].getBuffer();
@@ -505,11 +505,10 @@
continue;
}
table.reset();
- InMemoryHashJoin joiner =
- new InMemoryHashJoin(ctx, new FrameTupleAccessor(rd0), hpcRep0,
- new FrameTupleAccessor(rd1), rd1, hpcRep1,
- new FrameTuplePairComparator(keys0, keys1, comparators), isLeftOuter,
- nullWriters1, table, predEvaluator, null);
+ InMemoryHashJoin joiner = new InMemoryHashJoin(ctx, new FrameTupleAccessor(rd0),
+ hpcRep0, new FrameTupleAccessor(rd1), rd1, hpcRep1,
+ new FrameTuplePairComparator(keys0, keys1, comparators), isLeftOuter,
+ nullWriters1, table, predEvaluator, null);
if (buildWriter != null) {
RunFileReader buildReader = buildWriter.createDeleteOnCloseReader();
@@ -559,8 +558,8 @@
private void write(int i, ByteBuffer head) throws HyracksDataException {
RunFileWriter writer = probeWriters[i];
if (writer == null) {
- FileReference file = ctx
- .createManagedWorkspaceFile(PartitionAndJoinActivityNode.class.getSimpleName());
+ FileReference file =
+ ctx.createManagedWorkspaceFile(PartitionAndJoinActivityNode.class.getSimpleName());
writer = new RunFileWriter(file, ctx.getIoManager());
writer.open();
probeWriters[i] = writer;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
index a51b780..10c6227 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoin.java
@@ -66,22 +66,20 @@
private static final Logger LOGGER = LogManager.getLogger();
- public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe,
- ITuplePartitionComputer tpcProbe, FrameTupleAccessor accessorBuild, RecordDescriptor rDBuild,
- ITuplePartitionComputer tpcBuild, FrameTuplePairComparator comparator, boolean isLeftOuter,
- IMissingWriter[] missingWritersBuild, ISerializableTable table, IPredicateEvaluator predEval,
- ISimpleFrameBufferManager bufferManager)
+ public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe, ITuplePartitionComputer tpcProbe,
+ FrameTupleAccessor accessorBuild, RecordDescriptor rDBuild, ITuplePartitionComputer tpcBuild,
+ FrameTuplePairComparator comparator, boolean isLeftOuter, IMissingWriter[] missingWritersBuild,
+ ISerializableTable table, IPredicateEvaluator predEval, ISimpleFrameBufferManager bufferManager)
throws HyracksDataException {
this(ctx, accessorProbe, tpcProbe, accessorBuild, rDBuild, tpcBuild, comparator, isLeftOuter,
missingWritersBuild, table, predEval, false, bufferManager);
}
- public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe,
- ITuplePartitionComputer tpcProbe, FrameTupleAccessor accessorBuild,
- RecordDescriptor rDBuild, ITuplePartitionComputer tpcBuild, FrameTuplePairComparator comparator,
- boolean isLeftOuter, IMissingWriter[] missingWritersBuild, ISerializableTable table,
- IPredicateEvaluator predEval, boolean reverse, ISimpleFrameBufferManager bufferManager)
- throws HyracksDataException {
+ public InMemoryHashJoin(IHyracksTaskContext ctx, FrameTupleAccessor accessorProbe, ITuplePartitionComputer tpcProbe,
+ FrameTupleAccessor accessorBuild, RecordDescriptor rDBuild, ITuplePartitionComputer tpcBuild,
+ FrameTuplePairComparator comparator, boolean isLeftOuter, IMissingWriter[] missingWritersBuild,
+ ISerializableTable table, IPredicateEvaluator predEval, boolean reverse,
+ ISimpleFrameBufferManager bufferManager) throws HyracksDataException {
this.table = table;
storedTuplePointer = new TuplePointer();
buffers = new ArrayList<>();
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
index d81d955..3873bae 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/InMemoryHashJoinOperatorDescriptor.java
@@ -162,15 +162,15 @@
for (int i = 0; i < comparatorFactories.length; ++i) {
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
- final IMissingWriter[] nullWriters1 = isLeftOuter ? new IMissingWriter[nonMatchWriterFactories.length]
- : null;
+ final IMissingWriter[] nullWriters1 =
+ isLeftOuter ? new IMissingWriter[nonMatchWriterFactories.length] : null;
if (isLeftOuter) {
for (int i = 0; i < nonMatchWriterFactories.length; i++) {
nullWriters1[i] = nonMatchWriterFactories[i].createMissingWriter();
}
}
- final IPredicateEvaluator predEvaluator = (predEvaluatorFactory == null ? null
- : predEvaluatorFactory.createPredicateEvaluator());
+ final IPredicateEvaluator predEvaluator =
+ (predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator());
final int memSizeInBytes = memSizeInFrames * ctx.getInitialFrameSize();
final IDeallocatableFramePool framePool = new DeallocatableFramePool(ctx, memSizeInBytes);
@@ -181,10 +181,10 @@
@Override
public void open() throws HyracksDataException {
- ITuplePartitionComputer hpc0 = new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories)
- .createPartitioner();
- ITuplePartitionComputer hpc1 = new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories)
- .createPartitioner();
+ ITuplePartitionComputer hpc0 =
+ new FieldHashPartitionComputerFactory(keys0, hashFunctionFactories).createPartitioner();
+ ITuplePartitionComputer hpc1 =
+ new FieldHashPartitionComputerFactory(keys1, hashFunctionFactories).createPartitioner();
state = new HashBuildTaskState(ctx.getJobletContext().getJobId(),
new TaskId(getActivityId(), partition));
ISerializableTable table = new SerializableHashTable(tableSize, ctx, bufferManager);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java
index 5306ae5..9c5dc60 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/JoinComparator.java
@@ -51,8 +51,8 @@
int fEnd1 = accessor1.getFieldEndOffset(tIndex1, field1);
int fLen1 = fEnd1 - fStart1;
- int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0, accessor1
- .getBuffer().array(), fStart1 + fStartOffset1, fLen1);
+ int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0,
+ accessor1.getBuffer().array(), fStart1 + fStartOffset1, fLen1);
if (c != 0) {
return c;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java
index d9c0bcd..d0f5a73 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoin.java
@@ -70,9 +70,9 @@
if (memSize < 3) {
throw new HyracksDataException("Not enough memory is available for Nested Loop Join");
}
- this.outerBufferMngr = new VariableFrameMemoryManager(
- new VariableFramePool(ctx, ctx.getInitialFrameSize() * (memSize - 2)),
- FrameFreeSlotPolicyFactory.createFreeSlotPolicy(EnumFreeSlotPolicy.LAST_FIT, memSize - 2));
+ this.outerBufferMngr =
+ new VariableFrameMemoryManager(new VariableFramePool(ctx, ctx.getInitialFrameSize() * (memSize - 2)),
+ FrameFreeSlotPolicyFactory.createFreeSlotPolicy(EnumFreeSlotPolicy.LAST_FIT, memSize - 2));
this.predEvaluator = predEval;
this.isReversed = false;
@@ -90,8 +90,8 @@
missingTupleBuilder = null;
}
- FileReference file = ctx.getJobletContext()
- .createManagedWorkspaceFile(this.getClass().getSimpleName() + this.toString());
+ FileReference file =
+ ctx.getJobletContext().createManagedWorkspaceFile(this.getClass().getSimpleName() + this.toString());
runFileWriter = new RunFileWriter(file, ctx.getIoManager());
runFileWriter.open();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
index 99dbfad..2236056 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/NestedLoopJoinOperatorDescriptor.java
@@ -117,8 +117,8 @@
final RecordDescriptor rd0 = recordDescProvider.getInputRecordDescriptor(nljAid, 0);
final RecordDescriptor rd1 = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
final ITuplePairComparator comparator = comparatorFactory.createTuplePairComparator(ctx);
- final IPredicateEvaluator predEvaluator = (predEvaluatorFactory != null)
- ? predEvaluatorFactory.createPredicateEvaluator() : null;
+ final IPredicateEvaluator predEvaluator =
+ (predEvaluatorFactory != null) ? predEvaluatorFactory.createPredicateEvaluator() : null;
final IMissingWriter[] nullWriters1 = isLeftOuter ? new IMissingWriter[nullWriterFactories1.length] : null;
if (isLeftOuter) {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
index d49a6dd..ddf1741 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoin.java
@@ -110,8 +110,7 @@
private int[] probePSizeInTups;
public OptimizedHybridHashJoin(IHyracksTaskContext ctx, int memSizeInFrames, int numOfPartitions,
- String probeRelName,
- String buildRelName, int[] probeKeys, int[] buildKeys, IBinaryComparator[] comparators,
+ String probeRelName, String buildRelName, int[] probeKeys, int[] buildKeys, IBinaryComparator[] comparators,
RecordDescriptor probeRd, RecordDescriptor buildRd, ITuplePartitionComputer probeHpc,
ITuplePartitionComputer buildHpc, IPredicateEvaluator predEval, boolean isLeftOuter,
IMissingWriterFactory[] nullWriterFactories1) {
@@ -259,8 +258,8 @@
break;
}
try {
- for (int pid = spilledStatus.nextSetBit(0); pid >= 0
- && pid < numOfPartitions; pid = spilledStatus.nextSetBit(pid + 1)) {
+ for (int pid = spilledStatus.nextSetBit(0); pid >= 0 && pid < numOfPartitions; pid =
+ spilledStatus.nextSetBit(pid + 1)) {
if (bufferManager.getNumTuples(pid) > 0) {
bufferManager.flushPartition(pid, getSpillWriterOrCreateNewOneIfNotExist(pid, whichSide));
bufferManager.clearPartition(pid);
@@ -293,16 +292,15 @@
// For partitions in main memory, we deduct their size from the free space.
int inMemTupCount = 0;
- for (int p = spilledStatus.nextClearBit(0); p >= 0
- && p < numOfPartitions; p = spilledStatus.nextClearBit(p + 1)) {
+ for (int p = spilledStatus.nextClearBit(0); p >= 0 && p < numOfPartitions; p =
+ spilledStatus.nextClearBit(p + 1)) {
freeSpace -= bufferManager.getPhysicalSize(p);
inMemTupCount += buildPSizeInTups[p];
}
// Calculates the expected hash table size for the given number of tuples in main memory
// and deducts it from the free space.
- long hashTableByteSizeForInMemTuples = SerializableHashTable.getExpectedTableByteSize(inMemTupCount,
- frameSize);
+ long hashTableByteSizeForInMemTuples = SerializableHashTable.getExpectedTableByteSize(inMemTupCount, frameSize);
freeSpace -= hashTableByteSizeForInMemTuples;
// In the case where free space is less than zero after considering the hash table size,
@@ -317,8 +315,9 @@
int pidToSpill = selectSinglePartitionToSpill(freeSpace, inMemTupCount, frameSize);
if (pidToSpill >= 0) {
// There is a suitable one. We spill that partition to the disk.
- long hashTableSizeDecrease = -SerializableHashTable.calculateByteSizeDeltaForTableSizeChange(
- inMemTupCount, -buildPSizeInTups[pidToSpill], frameSize);
+ long hashTableSizeDecrease =
+ -SerializableHashTable.calculateByteSizeDeltaForTableSizeChange(inMemTupCount,
+ -buildPSizeInTups[pidToSpill], frameSize);
freeSpace = freeSpace + bufferManager.getPhysicalSize(pidToSpill) + hashTableSizeDecrease;
inMemTupCount -= buildPSizeInTups[pidToSpill];
spillPartition(pidToSpill);
@@ -327,8 +326,8 @@
} else {
// There is no single suitable partition. So, we need to spill multiple partitions to the disk
// in order to accommodate the hash table.
- for (int p = spilledStatus.nextClearBit(0); p >= 0
- && p < numOfPartitions; p = spilledStatus.nextClearBit(p + 1)) {
+ for (int p = spilledStatus.nextClearBit(0); p >= 0 && p < numOfPartitions; p =
+ spilledStatus.nextClearBit(p + 1)) {
int spaceToBeReturned = bufferManager.getPhysicalSize(p);
int numberOfTuplesToBeSpilled = buildPSizeInTups[p];
if (spaceToBeReturned == 0 || numberOfTuplesToBeSpilled == 0) {
@@ -340,9 +339,9 @@
// Since the number of tuples in memory has been decreased,
// the hash table size will be decreased, too.
// We put minus since the method returns a negative value to represent a newly reclaimed space.
- long expectedHashTableSizeDecrease = -SerializableHashTable
- .calculateByteSizeDeltaForTableSizeChange(inMemTupCount, -numberOfTuplesToBeSpilled,
- frameSize);
+ long expectedHashTableSizeDecrease =
+ -SerializableHashTable.calculateByteSizeDeltaForTableSizeChange(inMemTupCount,
+ -numberOfTuplesToBeSpilled, frameSize);
freeSpace = freeSpace + spaceToBeReturned + expectedHashTableSizeDecrease;
// Adjusts the hash table size
inMemTupCount -= numberOfTuplesToBeSpilled;
@@ -356,8 +355,7 @@
// If more partitions have been spilled to the disk, calculate the expected hash table size again
// before bringing some partitions to main memory.
if (moreSpilled) {
- hashTableByteSizeForInMemTuples = SerializableHashTable.getExpectedTableByteSize(inMemTupCount,
- frameSize);
+ hashTableByteSizeForInMemTuples = SerializableHashTable.getExpectedTableByteSize(inMemTupCount, frameSize);
}
// Brings back some partitions if there is enough free space.
@@ -387,8 +385,8 @@
long minSpaceAfterSpill = (long) memSizeInFrames * frameSize;
int minSpaceAfterSpillPartID = -1;
- for (int p = spilledStatus.nextClearBit(0); p >= 0
- && p < numOfPartitions; p = spilledStatus.nextClearBit(p + 1)) {
+ for (int p = spilledStatus.nextClearBit(0); p >= 0 && p < numOfPartitions; p =
+ spilledStatus.nextClearBit(p + 1)) {
if (buildPSizeInTups[p] == 0 || bufferManager.getPhysicalSize(p) == 0) {
continue;
}
@@ -408,8 +406,8 @@
}
private int selectPartitionsToReload(long freeSpace, int pid, int inMemTupCount) {
- for (int i = spilledStatus.nextSetBit(pid); i >= 0
- && i < numOfPartitions; i = spilledStatus.nextSetBit(i + 1)) {
+ for (int i = spilledStatus.nextSetBit(pid); i >= 0 && i < numOfPartitions; i =
+ spilledStatus.nextSetBit(i + 1)) {
int spilledTupleCount = buildPSizeInTups[i];
// Expected hash table size increase after reloading this partition
long expectedHashTableByteSizeIncrease = SerializableHashTable.calculateByteSizeDeltaForTableSizeChange(
@@ -452,10 +450,10 @@
private void createInMemoryJoiner(int inMemTupCount) throws HyracksDataException {
ISerializableTable table = new SerializableHashTable(inMemTupCount, ctx, bufferManagerForHashTable);
- this.inMemJoiner = new InMemoryHashJoin(ctx, new FrameTupleAccessor(probeRd), probeHpc,
- new FrameTupleAccessor(buildRd), buildRd, buildHpc,
- new FrameTuplePairComparator(probeKeys, buildKeys, comparators), isLeftOuter, nonMatchWriters, table,
- predEvaluator, isReversed, bufferManagerForHashTable);
+ this.inMemJoiner =
+ new InMemoryHashJoin(ctx, new FrameTupleAccessor(probeRd), probeHpc, new FrameTupleAccessor(buildRd),
+ buildRd, buildHpc, new FrameTuplePairComparator(probeKeys, buildKeys, comparators), isLeftOuter,
+ nonMatchWriters, table, predEvaluator, isReversed, bufferManagerForHashTable);
}
private void loadDataInMemJoin() throws HyracksDataException {
@@ -632,8 +630,8 @@
buf.append("(A) Spilled partitions" + "\n");
int spilledTupleCount = 0;
int spilledPartByteSize = 0;
- for (int pid = spilledStatus.nextSetBit(0); pid >= 0
- && pid < numOfPartitions; pid = spilledStatus.nextSetBit(pid + 1)) {
+ for (int pid = spilledStatus.nextSetBit(0); pid >= 0 && pid < numOfPartitions; pid =
+ spilledStatus.nextSetBit(pid + 1)) {
if (whichSide == SIDE.BUILD) {
spilledTupleCount += buildPSizeInTups[pid];
spilledPartByteSize += buildRFWriters[pid].getFileSize();
@@ -653,8 +651,8 @@
buf.append("(B) In-memory partitions" + "\n");
int inMemoryTupleCount = 0;
int inMemoryPartByteSize = 0;
- for (int pid = spilledStatus.nextClearBit(0); pid >= 0
- && pid < numOfPartitions; pid = spilledStatus.nextClearBit(pid + 1)) {
+ for (int pid = spilledStatus.nextClearBit(0); pid >= 0 && pid < numOfPartitions; pid =
+ spilledStatus.nextClearBit(pid + 1)) {
if (whichSide == SIDE.BUILD) {
inMemoryTupleCount += buildPSizeInTups[pid];
inMemoryPartByteSize += bufferManager.getPhysicalSize(pid);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
index 8dbe9b0..9eeb363 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/join/OptimizedHybridHashJoinOperatorDescriptor.java
@@ -268,17 +268,19 @@
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
- final IPredicateEvaluator predEvaluator = (predEvaluatorFactory == null ? null
- : predEvaluatorFactory.createPredicateEvaluator());
+ final IPredicateEvaluator predEvaluator =
+ (predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator());
IOperatorNodePushable op = new AbstractUnaryInputSinkOperatorNodePushable() {
private BuildAndPartitionTaskState state = new BuildAndPartitionTaskState(
ctx.getJobletContext().getJobId(), new TaskId(getActivityId(), partition));
- ITuplePartitionComputer probeHpc = new FieldHashPartitionComputerFamily(probeKeys,
- hashFunctionGeneratorFactories).createPartitioner(0);
- ITuplePartitionComputer buildHpc = new FieldHashPartitionComputerFamily(buildKeys,
- hashFunctionGeneratorFactories).createPartitioner(0);
+ ITuplePartitionComputer probeHpc =
+ new FieldHashPartitionComputerFamily(probeKeys, hashFunctionGeneratorFactories)
+ .createPartitioner(0);
+ ITuplePartitionComputer buildHpc =
+ new FieldHashPartitionComputerFamily(buildKeys, hashFunctionGeneratorFactories)
+ .createPartitioner(0);
boolean isFailed = false;
@Override
@@ -287,8 +289,8 @@
throw new HyracksDataException("Not enough memory is assigend for Hybrid Hash Join.");
}
state.memForJoin = memSizeInFrames - 2;
- state.numOfPartitions = getNumberOfPartitions(state.memForJoin, inputsize0, fudgeFactor,
- nPartitions);
+ state.numOfPartitions =
+ getNumberOfPartitions(state.memForJoin, inputsize0, fudgeFactor, nPartitions);
state.hybridHJ = new OptimizedHybridHashJoin(ctx, state.memForJoin, state.numOfPartitions,
PROBE_REL, BUILD_REL, probeKeys, buildKeys, comparators, probeRd, buildRd, probeHpc,
buildHpc, predEvaluator, isLeftOuter, nonMatchWriterFactories);
@@ -355,21 +357,21 @@
final RecordDescriptor buildRd = recordDescProvider.getInputRecordDescriptor(buildAid, 0);
final RecordDescriptor probeRd = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
final IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
- final ITuplePairComparator nljComparatorProbe2Build = tuplePairComparatorFactoryProbe2Build
- .createTuplePairComparator(ctx);
- final ITuplePairComparator nljComparatorBuild2Probe = tuplePairComparatorFactoryBuild2Probe
- .createTuplePairComparator(ctx);
- final IPredicateEvaluator predEvaluator = predEvaluatorFactory == null ? null
- : predEvaluatorFactory.createPredicateEvaluator();
+ final ITuplePairComparator nljComparatorProbe2Build =
+ tuplePairComparatorFactoryProbe2Build.createTuplePairComparator(ctx);
+ final ITuplePairComparator nljComparatorBuild2Probe =
+ tuplePairComparatorFactoryBuild2Probe.createTuplePairComparator(ctx);
+ final IPredicateEvaluator predEvaluator =
+ predEvaluatorFactory == null ? null : predEvaluatorFactory.createPredicateEvaluator();
for (int i = 0; i < comparatorFactories.length; i++) {
comparators[i] = comparatorFactories[i].createBinaryComparator();
}
- final IMissingWriter[] nonMatchWriter = isLeftOuter ? new IMissingWriter[nonMatchWriterFactories.length]
- : null;
- final ArrayTupleBuilder nullTupleBuild = isLeftOuter ? new ArrayTupleBuilder(buildRd.getFieldCount())
- : null;
+ final IMissingWriter[] nonMatchWriter =
+ isLeftOuter ? new IMissingWriter[nonMatchWriterFactories.length] : null;
+ final ArrayTupleBuilder nullTupleBuild =
+ isLeftOuter ? new ArrayTupleBuilder(buildRd.getFieldCount()) : null;
if (isLeftOuter) {
DataOutput out = nullTupleBuild.getDataOutput();
for (int i = 0; i < nonMatchWriterFactories.length; i++) {
@@ -432,8 +434,8 @@
}
BitSet partitionStatus = state.hybridHJ.getPartitionStatus();
rPartbuff.reset();
- for (int pid = partitionStatus.nextSetBit(0); pid >= 0; pid = partitionStatus
- .nextSetBit(pid + 1)) {
+ for (int pid = partitionStatus.nextSetBit(0); pid >= 0; pid =
+ partitionStatus.nextSetBit(pid + 1)) {
RunFileReader bReader = state.hybridHJ.getBuildRFReader(pid);
RunFileReader pReader = state.hybridHJ.getProbeRFReader(pid);
@@ -474,10 +476,12 @@
//The buildSideReader should be always the original buildSideReader, so should the probeSideReader
private void joinPartitionPair(RunFileReader buildSideReader, RunFileReader probeSideReader,
int buildSizeInTuple, int probeSizeInTuple, int level) throws HyracksDataException {
- ITuplePartitionComputer probeHpc = new FieldHashPartitionComputerFamily(probeKeys,
- hashFunctionGeneratorFactories).createPartitioner(level);
- ITuplePartitionComputer buildHpc = new FieldHashPartitionComputerFamily(buildKeys,
- hashFunctionGeneratorFactories).createPartitioner(level);
+ ITuplePartitionComputer probeHpc =
+ new FieldHashPartitionComputerFamily(probeKeys, hashFunctionGeneratorFactories)
+ .createPartitioner(level);
+ ITuplePartitionComputer buildHpc =
+ new FieldHashPartitionComputerFamily(buildKeys, hashFunctionGeneratorFactories)
+ .createPartitioner(level);
int frameSize = ctx.getInitialFrameSize();
long buildPartSize = (long) Math.ceil((double) buildSideReader.getFileSize() / (double) frameSize);
@@ -492,10 +496,10 @@
}
// Calculate the expected hash table size for the both side.
- long expectedHashTableSizeForBuildInFrame = SerializableHashTable
- .getExpectedTableFrameCount(buildSizeInTuple, frameSize);
- long expectedHashTableSizeForProbeInFrame = SerializableHashTable
- .getExpectedTableFrameCount(probeSizeInTuple, frameSize);
+ long expectedHashTableSizeForBuildInFrame =
+ SerializableHashTable.getExpectedTableFrameCount(buildSizeInTuple, frameSize);
+ long expectedHashTableSizeForProbeInFrame =
+ SerializableHashTable.getExpectedTableFrameCount(probeSizeInTuple, frameSize);
//Apply in-Mem HJ if possible
if (!skipInMemoryHJ && ((buildPartSize + expectedHashTableSizeForBuildInFrame < state.memForJoin)
@@ -646,9 +650,8 @@
} else { //Case 2.1.2 - Switch to NLJ
if (LOGGER.isDebugEnabled()) {
- LOGGER.debug(
- "\t\t>>>Case 2.1.2 - SWITCHED to NLJ RecursiveHHJ WITH "
- + "(isLeftOuter || build<probe) - [Level " + level + "]");
+ LOGGER.debug("\t\t>>>Case 2.1.2 - SWITCHED to NLJ RecursiveHHJ WITH "
+ + "(isLeftOuter || build<probe) - [Level " + level + "]");
}
for (int rPid = rPStatus.nextSetBit(0); rPid >= 0; rPid = rPStatus.nextSetBit(rPid + 1)) {
RunFileReader rbrfw = rHHj.getBuildRFReader(rPid);
@@ -696,8 +699,8 @@
probeTupleAccessor.reset(rPartbuff.getBuffer());
for (int tid = 0; tid < probeTupleAccessor.getTupleCount(); tid++) {
FrameUtils.appendConcatToWriter(writer, nullResultAppender, probeTupleAccessor, tid,
- nullTupleBuild.getFieldEndOffsets(), nullTupleBuild.getByteArray(), 0,
- nullTupleBuild.getSize());
+ nullTupleBuild.getFieldEndOffsets(), nullTupleBuild.getByteArray(), 0,
+ nullTupleBuild.getSize());
}
}
nullResultAppender.write(writer, true);
@@ -713,13 +716,13 @@
boolean isReversed = pKeys == OptimizedHybridHashJoinOperatorDescriptor.this.buildKeys
&& bKeys == OptimizedHybridHashJoinOperatorDescriptor.this.probeKeys;
assert isLeftOuter ? !isReversed : true : "LeftOut Join can not reverse roles";
- IDeallocatableFramePool framePool = new DeallocatableFramePool(ctx,
- state.memForJoin * ctx.getInitialFrameSize());
+ IDeallocatableFramePool framePool =
+ new DeallocatableFramePool(ctx, state.memForJoin * ctx.getInitialFrameSize());
ISimpleFrameBufferManager bufferManager = new FramePoolBackedFrameBufferManager(framePool);
ISerializableTable table = new SerializableHashTable(tabSize, ctx, bufferManager);
- InMemoryHashJoin joiner = new InMemoryHashJoin(ctx, new FrameTupleAccessor(probeRDesc),
- hpcRepProbe, new FrameTupleAccessor(buildRDesc), buildRDesc, hpcRepBuild,
+ InMemoryHashJoin joiner = new InMemoryHashJoin(ctx, new FrameTupleAccessor(probeRDesc), hpcRepProbe,
+ new FrameTupleAccessor(buildRDesc), buildRDesc, hpcRepBuild,
new FrameTuplePairComparator(pKeys, bKeys, comparators), isLeftOuter, nonMatchWriter, table,
predEvaluator, isReversed, bufferManager);
@@ -777,11 +780,11 @@
// Hence the reverse relation is different.
boolean isReversed = outerRd == buildRd && innerRd == probeRd;
assert isLeftOuter ? !isReversed : true : "LeftOut Join can not reverse roles";
- ITuplePairComparator nljComptorOuterInner = isReversed ? nljComparatorBuild2Probe
- : nljComparatorProbe2Build;
- NestedLoopJoin nlj = new NestedLoopJoin(ctx, new FrameTupleAccessor(outerRd),
- new FrameTupleAccessor(innerRd), nljComptorOuterInner, memorySize, predEvaluator,
- isLeftOuter, nonMatchWriter);
+ ITuplePairComparator nljComptorOuterInner =
+ isReversed ? nljComparatorBuild2Probe : nljComparatorProbe2Build;
+ NestedLoopJoin nlj =
+ new NestedLoopJoin(ctx, new FrameTupleAccessor(outerRd), new FrameTupleAccessor(innerRd),
+ nljComptorOuterInner, memorySize, predEvaluator, isLeftOuter, nonMatchWriter);
nlj.setIsReversed(isReversed);
IFrame cacheBuff = new VSizeFrame(ctx);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
index 0629168..7b687c4 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/ConstantTupleSourceOperatorDescriptor.java
@@ -34,8 +34,8 @@
private byte[] tupleData;
private int tupleSize;
- public ConstantTupleSourceOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc, int[] fieldSlots,
- byte[] tupleData, int tupleSize) {
+ public ConstantTupleSourceOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recDesc,
+ int[] fieldSlots, byte[] tupleData, int tupleSize) {
super(spec, 0, 1);
this.tupleData = tupleData;
this.fieldSlots = fieldSlots;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/IdentityOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/IdentityOperatorDescriptor.java
index 156198a..29c8fed 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/IdentityOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/IdentityOperatorDescriptor.java
@@ -40,7 +40,7 @@
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
@Override
public void open() throws HyracksDataException {
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/LimitOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/LimitOperatorDescriptor.java
index 4fc1ad2..a190686 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/LimitOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/LimitOperatorDescriptor.java
@@ -46,7 +46,7 @@
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
private FrameTupleAccessor fta;
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java
index 31cbaad..6ba11ca 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java
@@ -59,8 +59,8 @@
}
public void open(IHyracksTaskContext ctx) throws HyracksDataException {
- FileReference file = ctx.getJobletContext()
- .createManagedWorkspaceFile(MaterializerTaskState.class.getSimpleName());
+ FileReference file =
+ ctx.getJobletContext().createManagedWorkspaceFile(MaterializerTaskState.class.getSimpleName());
out = new RunFileWriter(file, ctx.getIoManager());
out.open();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
index d3e87d4..3f97752 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
@@ -59,8 +59,8 @@
@Override
public void contributeActivities(IActivityGraphBuilder builder) {
if (isSingleActivity) {
- MaterializerReaderActivityNode mra = new MaterializerReaderActivityNode(
- new ActivityId(odId, MATERIALIZER_READER_ACTIVITY_ID));
+ MaterializerReaderActivityNode mra =
+ new MaterializerReaderActivityNode(new ActivityId(odId, MATERIALIZER_READER_ACTIVITY_ID));
builder.addActivity(this, mra);
builder.addSourceEdge(0, mra, 0);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/result/ResultWriterOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/result/ResultWriterOperatorDescriptor.java
index d081bdb..b0cc40c 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/result/ResultWriterOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/result/ResultWriterOperatorDescriptor.java
@@ -76,8 +76,8 @@
PrintStream printStream = new PrintStream(frameOutputStream);
final RecordDescriptor outRecordDesc = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
- final IResultSerializer resultSerializer = resultSerializerFactory.createResultSerializer(outRecordDesc,
- printStream);
+ final IResultSerializer resultSerializer =
+ resultSerializerFactory.createResultSerializer(outRecordDesc, printStream);
final FrameTupleAccessor frameTupleAccessor = new FrameTupleAccessor(outRecordDesc);
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/RunMergingFrameReader.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/RunMergingFrameReader.java
index 3cbe86b..4d9e813 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/RunMergingFrameReader.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/RunMergingFrameReader.java
@@ -72,9 +72,8 @@
// right now we didn't take multiple key normalizers for frame merger, since during this step it won't be
// too many cache misses (merging multiple runs sequentially).
// but still, we can apply a special optimization if there is only 1 sort field
- this.normalizedKeyDecisive =
- nmkComputer != null ? nmkComputer.getNormalizedKeyProperties().isDecisive() && comparators.length == 1
- : false;
+ this.normalizedKeyDecisive = nmkComputer != null
+ ? nmkComputer.getNormalizedKeyProperties().isDecisive() && comparators.length == 1 : false;
this.recordDesc = recordDesc;
this.topK = topK;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java
index a90d48f..dea770a 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java
@@ -41,9 +41,9 @@
public TopKSorterOperatorDescriptor(IOperatorDescriptorRegistry spec, int framesLimit, int topK, int[] sortFields,
INormalizedKeyComputerFactory firstKeyNormalizerFactory, IBinaryComparatorFactory[] comparatorFactories,
RecordDescriptor recordDescriptor) {
- this(spec, framesLimit, topK, sortFields,
- firstKeyNormalizerFactory != null ? new INormalizedKeyComputerFactory[] { firstKeyNormalizerFactory }
- : null,
+ this(spec, framesLimit, topK,
+ sortFields, firstKeyNormalizerFactory != null
+ ? new INormalizedKeyComputerFactory[] { firstKeyNormalizerFactory } : null,
comparatorFactories, recordDescriptor);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppender.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppender.java
index 7d4db64..8ff77ca 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppender.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppender.java
@@ -42,7 +42,7 @@
private int tupleCount;
private int freeDataEndOffset;
private int deletedSpace;
- private byte[] array; // to speed up the array visit a little
+ private byte[] array; // to speed up the array visit a little
public DeletableFrameTupleAppender(RecordDescriptor recordDescriptor) {
this.recordDescriptor = recordDescriptor;
@@ -146,7 +146,7 @@
endOffset = getTupleEndOffset(i);
if (endOffset >= 0) {
int length = endOffset - startOffset;
- assert ( length >= 0);
+ assert (length >= 0);
if (freeDataEndOffset != startOffset) {
System.arraycopy(array, startOffset, array, freeDataEndOffset, length);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/GroupVSizeFrame.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/GroupVSizeFrame.java
index d9460aa..1f45032 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/GroupVSizeFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/util/GroupVSizeFrame.java
@@ -26,8 +26,7 @@
public class GroupVSizeFrame extends VSizeFrame {
- public GroupVSizeFrame(IHyracksCommonContext ctx, int frameSize)
- throws HyracksDataException {
+ public GroupVSizeFrame(IHyracksCommonContext ctx, int frameSize) throws HyracksDataException {
super(ctx, frameSize);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/IResetableComparable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/IResetableComparable.java
index 1b202e5..7cb6356 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/IResetableComparable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/IResetableComparable.java
@@ -19,5 +19,5 @@
package org.apache.hyracks.dataflow.std.structures;
-public interface IResetableComparable<T> extends IResetable<T>, Comparable<T>{
+public interface IResetableComparable<T> extends IResetable<T>, Comparable<T> {
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/ISerializableTable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/ISerializableTable.java
index 015ddb3..51f9984 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/ISerializableTable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/ISerializableTable.java
@@ -57,8 +57,7 @@
* @return the number of frames that are reclaimed.
* @throws HyracksDataException
*/
- int collectGarbage(ITuplePointerAccessor bufferAccessor, ITuplePartitionComputer tpc)
- throws HyracksDataException;
+ int collectGarbage(ITuplePointerAccessor bufferAccessor, ITuplePartitionComputer tpc) throws HyracksDataException;
/**
* Prints out the internal information of this table.
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/MaxHeap.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/MaxHeap.java
index 4c9d05d..87d17da 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/MaxHeap.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/MaxHeap.java
@@ -64,4 +64,3 @@
trickleDown(0);
}
}
-
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SerializableHashTable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SerializableHashTable.java
index ca97be3..e6da7c9 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SerializableHashTable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SerializableHashTable.java
@@ -45,8 +45,7 @@
}
public SerializableHashTable(int tableSize, final IHyracksFrameMgrContext ctx,
- ISimpleFrameBufferManager bufferManager, double garbageCollectionThreshold)
- throws HyracksDataException {
+ ISimpleFrameBufferManager bufferManager, double garbageCollectionThreshold) throws HyracksDataException {
super(tableSize, ctx, false);
this.bufferManager = bufferManager;
@@ -142,8 +141,8 @@
// Step #2. Advances the reader until it hits the end of the given frame.
while (gcInfo.currentReadIntOffsetInPageForGC < frameCapacity) {
- nextSlotIntPosInPageForGC = findNextSlotInPage(currentReadContentFrameForGC,
- gcInfo.currentReadIntOffsetInPageForGC);
+ nextSlotIntPosInPageForGC =
+ findNextSlotInPage(currentReadContentFrameForGC, gcInfo.currentReadIntOffsetInPageForGC);
if (nextSlotIntPosInPageForGC == INVALID_VALUE) {
// There isn't a valid slot in the page. Exits the loop #2 and reads the next frame.
@@ -174,8 +173,8 @@
}
// Migrates this slot to the current offset in Writer's Frame if possible.
- currentPageChanged = MigrateSlot(gcInfo, bufferAccessor, tpc, capacityInIntCount,
- nextSlotIntPosInPageForGC);
+ currentPageChanged =
+ MigrateSlot(gcInfo, bufferAccessor, tpc, capacityInIntCount, nextSlotIntPosInPageForGC);
if (currentPageChanged) {
currentReadContentFrameForGC = contents.get(gcInfo.currentReadPageForGC);
@@ -385,8 +384,8 @@
* given tuple pointer.
*/
private void updateHeaderToContentPointerInHeaderFrame(ITuplePointerAccessor bufferAccessor,
- ITuplePartitionComputer tpc, TuplePointer hashedTuple, int newContentFrame,
- int newOffsetInContentFrame) throws HyracksDataException {
+ ITuplePartitionComputer tpc, TuplePointer hashedTuple, int newContentFrame, int newOffsetInContentFrame)
+ throws HyracksDataException {
// Finds the original hash value. We assume that bufferAccessor and tpc is already assigned.
bufferAccessor.reset(hashedTuple);
int entry = tpc.partition(bufferAccessor, hashedTuple.getTupleIndex(), tableSize);
@@ -401,7 +400,6 @@
headerFrame.writeInt(offsetInHeaderFrame + 1, newOffsetInContentFrame);
}
-
/**
* Tries to find the next valid slot position in the given content frame from the current position.
*/
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SimpleSerializableHashTable.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SimpleSerializableHashTable.java
index 6497a53..cfc58ce 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SimpleSerializableHashTable.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/structures/SimpleSerializableHashTable.java
@@ -309,16 +309,16 @@
lastContentFrame.writeInt(lastOffsetInCurrentFrame + 2, pointer.getFrameIndex());
lastContentFrame.writeInt(lastOffsetInCurrentFrame + 3, pointer.getTupleIndex());
int newLastOffsetInContentFrame = lastOffsetInCurrentFrame + entryCapacity * 2;
- newLastOffsetInContentFrame = newLastOffsetInContentFrame < frameCapacity ? newLastOffsetInContentFrame
- : frameCapacity - 1;
+ newLastOffsetInContentFrame =
+ newLastOffsetInContentFrame < frameCapacity ? newLastOffsetInContentFrame : frameCapacity - 1;
currentOffsetInEachFrameList.set(currentFrameNumber, newLastOffsetInContentFrame);
requiredIntCapacity = entryCapacity * 2 - (frameCapacity - lastOffsetInCurrentFrame);
while (requiredIntCapacity > 0) {
currentFrameNumber++;
requiredIntCapacity -= frameCapacity;
- newLastOffsetInContentFrame = requiredIntCapacity < 0 ? requiredIntCapacity + frameCapacity
- : frameCapacity - 1;
+ newLastOffsetInContentFrame =
+ requiredIntCapacity < 0 ? requiredIntCapacity + frameCapacity : frameCapacity - 1;
currentOffsetInEachFrameList.set(currentFrameNumber, newLastOffsetInContentFrame);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
index 967977e..a03d8d7 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
@@ -62,7 +62,7 @@
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new UnionOperator();
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/util/ReferencedPriorityQueue.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/util/ReferencedPriorityQueue.java
index cc6c2d9..2681fe6 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/util/ReferencedPriorityQueue.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/util/ReferencedPriorityQueue.java
@@ -113,7 +113,7 @@
ReferenceEntry tmp = entries[slot];
entries[slot] = curr;
curr = tmp;// winner to pass up
- }// else curr wins
+ } // else curr wins
slot >>= 1;
}
// set new entries[0]
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
index 11148a2..0e42397 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/AbstractTupleMemoryManagerTest.java
@@ -40,8 +40,8 @@
import org.apache.hyracks.util.IntSerDeUtils;
public abstract class AbstractTupleMemoryManagerTest {
- ISerializerDeserializer[] fieldsSerDer = new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() };
+ ISerializerDeserializer[] fieldsSerDer = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ new UTF8StringSerializerDeserializer() };
RecordDescriptor recordDescriptor = new RecordDescriptor(fieldsSerDer);
ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(recordDescriptor.getFieldCount());
FrameTupleAccessor inFTA = new FrameTupleAccessor(recordDescriptor);
@@ -61,9 +61,7 @@
assertEquals(map.size(), mapInserted.size());
}
- protected Map<Integer, Integer> prepareFixedSizeTuples(
- int tuplePerFrame,
- int extraMetaBytePerFrame,
+ protected Map<Integer, Integer> prepareFixedSizeTuples(int tuplePerFrame, int extraMetaBytePerFrame,
int extraMetaBytePerRecord) throws HyracksDataException {
Map<Integer, Integer> dataSet = new HashMap<>();
ByteBuffer buffer = ByteBuffer.allocate(Common.BUDGET);
@@ -72,8 +70,7 @@
appender.reset(frame, true);
int sizePerTuple = (Common.MIN_FRAME_SIZE - 1 - tuplePerFrame * 4 - 4 - extraMetaBytePerFrame) / tuplePerFrame;
- int sizeChar =
- sizePerTuple - extraMetaBytePerRecord - fieldsSerDer.length * 4 - 4 - 2; //2byte to write str length
+ int sizeChar = sizePerTuple - extraMetaBytePerRecord - fieldsSerDer.length * 4 - 4 - 2; //2byte to write str length
assert (sizeChar > 0);
for (int i = 0; i < Common.NUM_MIN_FRAME * tuplePerFrame; i++) {
tupleBuilder.reset();
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/FrameFreeSlotBestFitUsingTreeMapTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/FrameFreeSlotBestFitUsingTreeMapTest.java
index 992c7f6..e5a4091 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/FrameFreeSlotBestFitUsingTreeMapTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/FrameFreeSlotBestFitUsingTreeMapTest.java
@@ -54,11 +54,10 @@
}
@Test
- public void testReset(){
+ public void testReset() {
testAll();
policy.reset();
testAll();
}
-
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableFramesMemoryManagerTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableFramesMemoryManagerTest.java
index 9d4a9a1..21680eb 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableFramesMemoryManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableFramesMemoryManagerTest.java
@@ -157,12 +157,12 @@
framesMemoryManager.getFrame(i, info);
fta.reset(info.getBuffer(), info.getStartOffset(), info.getLength());
for (int t = 0; t < fta.getTupleCount(); t++) {
- int id = parseTuple(fta.getBuffer(), fta.getTupleStartOffset(t) + fta.getFieldSlotsLength() + fta
- .getFieldStartOffset(t, 0));
+ int id = parseTuple(fta.getBuffer(),
+ fta.getTupleStartOffset(t) + fta.getFieldSlotsLength() + fta.getFieldStartOffset(t, 0));
// System.out.println("frameid:" + i + ",tuple:" + t + ",has id:" + id + ",length:" +
// (fta.getTupleEndOffset(t) - fta.getTupleStartOffset(t) - fta.getFieldSlotsLength()));
- assertTrue(tupleSet.remove(id) == fta.getTupleEndOffset(t) - fta.getTupleStartOffset(t) - fta
- .getFieldSlotsLength());
+ assertTrue(tupleSet.remove(id) == fta.getTupleEndOffset(t) - fta.getTupleStartOffset(t)
+ - fta.getFieldSlotsLength());
}
}
assertTrue(tupleSet.isEmpty());
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableTupleMemoryManagerTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableTupleMemoryManagerTest.java
index e2a231f..8cc6df3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableTupleMemoryManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/buffermanager/VariableTupleMemoryManagerTest.java
@@ -155,8 +155,7 @@
}
private ByteBuffer deleteRandomSelectedTuples(Map<Integer, Integer> map, Map<TuplePointer, Integer> mapInserted,
- int minNumOfRecordTobeDeleted)
- throws HyracksDataException {
+ int minNumOfRecordTobeDeleted) throws HyracksDataException {
ByteBuffer buffer = ByteBuffer.allocate(Common.BUDGET);
FixedSizeFrame frame = new FixedSizeFrame(buffer);
FrameTupleAppender appender = new FrameTupleAppender();
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppenderTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppenderTest.java
index 7686540..468f879 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppenderTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/sort/util/DeletableFrameTupleAppenderTest.java
@@ -39,10 +39,8 @@
public class DeletableFrameTupleAppenderTest {
DeletableFrameTupleAppender appender;
- ISerializerDeserializer[] fields = new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE,
- new UTF8StringSerializerDeserializer(),
- };
+ ISerializerDeserializer[] fields = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ new UTF8StringSerializerDeserializer(), };
RecordDescriptor recordDescriptor = new RecordDescriptor(fields);
ArrayTupleBuilder builder = new ArrayTupleBuilder(recordDescriptor.getFieldCount());
static final char TEST_CH = 'x';
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MaxHeapTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MaxHeapTest.java
index 16b24c4..7e31956 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MaxHeapTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MaxHeapTest.java
@@ -97,7 +97,7 @@
maxHeap.peekMax(peekI);
maxHeap.getMax(maxI);
assertTrue(peekI.compareTo(maxI) == 0);
- assertEquals( i++, capacity - 1 - maxI.i);
+ assertEquals(i++, capacity - 1 - maxI.i);
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MinHeapTest.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MinHeapTest.java
index 1a5fba3..be10095 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MinHeapTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/test/java/org/apache/hyracks/dataflow/std/structures/MinHeapTest.java
@@ -25,7 +25,7 @@
import org.junit.Test;
-public class MinHeapTest extends AbstracHeapTest{
+public class MinHeapTest extends AbstracHeapTest {
@Test
public void testInitialMinHeap() {
@@ -102,5 +102,4 @@
}
}
-
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
index 48377e3..8ac34d8 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
@@ -144,7 +144,8 @@
// B-Tree tuple, etc.
IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
- IIndexDataflowHelperFactory primaryHelperFactory = new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
+ IIndexDataflowHelperFactory primaryHelperFactory =
+ new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
// create operator descriptor
TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsert =
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
index 203d22c..2fb1cee 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
@@ -145,7 +145,8 @@
// to field 0 of B-Tree tuple,
// etc.
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
- IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
+ IIndexDataflowHelperFactory dataflowHelperFactory =
+ new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, recDesc,
fieldPermutation, 0.7f, false, 1000L, true, dataflowHelperFactory);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
index 603dc6b..c32b72c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
@@ -139,7 +139,8 @@
// into search op
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
- IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
+ IIndexDataflowHelperFactory dataflowHelperFactory =
+ new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, lowKeyFields,
highKeyFields, true, true, dataflowHelperFactory, false, false, null,
NoOpOperationCallbackFactory.INSTANCE, null, null, false);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
index 7507f10..a8bea08 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
@@ -117,7 +117,8 @@
// use a disk-order scan to read primary index
IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
- IIndexDataflowHelperFactory primaryHelperFactory = new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
+ IIndexDataflowHelperFactory primaryHelperFactory =
+ new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
TreeIndexDiskOrderScanOperatorDescriptor btreeScanOp = new TreeIndexDiskOrderScanOperatorDescriptor(spec,
recDesc, primaryHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
JobHelper.createPartitionConstraint(spec, btreeScanOp, splitNCs);
@@ -139,7 +140,8 @@
// tuple
int[] fieldPermutation = { 1, 0 };
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
- IIndexDataflowHelperFactory secondaryHelperFactory = new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
+ IIndexDataflowHelperFactory secondaryHelperFactory =
+ new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, null,
fieldPermutation, 0.7f, false, 1000L, true, secondaryHelperFactory);
JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
index 1e909ef..ccf20fe 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
@@ -183,7 +183,8 @@
// op
IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
- IIndexDataflowHelperFactory primaryHelperFactory = new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
+ IIndexDataflowHelperFactory primaryHelperFactory =
+ new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
primaryLowKeyFields, primaryHighKeyFields, true, true, primaryHelperFactory, false, false, null,
NoOpOperationCallbackFactory.INSTANCE, null, null, false);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/DataSetConstants.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/DataSetConstants.java
index d6775cc..db51ed2 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/DataSetConstants.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/DataSetConstants.java
@@ -31,19 +31,17 @@
public class DataSetConstants {
- public static final RecordDescriptor inputRecordDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ public static final RecordDescriptor inputRecordDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- public static final IValueParserFactory[] inputParserFactories =
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE };
+ public static final IValueParserFactory[] inputParserFactories = new IValueParserFactory[] {
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE };
// field, type and key declarations for primary index
public static int[] primaryFieldPermutation = { 0, 1, 2, 4, 5, 7 };
@@ -54,10 +52,9 @@
public static final IBinaryComparatorFactory[] filterCmpFactories =
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
- public static final ITypeTraits[] primaryTypeTraits =
- new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS, UTF8StringPointable.TYPE_TRAITS,
- UTF8StringPointable.TYPE_TRAITS, UTF8StringPointable.TYPE_TRAITS, UTF8StringPointable.TYPE_TRAITS,
- UTF8StringPointable.TYPE_TRAITS };
+ public static final ITypeTraits[] primaryTypeTraits = new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS,
+ UTF8StringPointable.TYPE_TRAITS, UTF8StringPointable.TYPE_TRAITS, UTF8StringPointable.TYPE_TRAITS,
+ UTF8StringPointable.TYPE_TRAITS, UTF8StringPointable.TYPE_TRAITS };
public static final IBinaryComparatorFactory[] primaryComparatorFactories =
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
@@ -65,18 +62,16 @@
public static final int[] primaryBloomFilterKeyFields = new int[] { 0 };
- public static final RecordDescriptor primaryRecDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ public static final RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- public static final RecordDescriptor primaryAndFilterRecDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ public static final RecordDescriptor primaryAndFilterRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
// field, type and key declarations for secondary indexes
@@ -94,11 +89,10 @@
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
- public static final RecordDescriptor secondaryRecDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
- public static final RecordDescriptor secondaryWithFilterRecDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ public static final RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
+ public static final RecordDescriptor secondaryWithFilterRecDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer() });
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java
index d230f38..5b091a2 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java
@@ -39,7 +39,6 @@
public static final boolean IS_POINT_MBR = false;
public static final boolean DURABLE = true;
-
public LSMRTreeWithAntiMatterTuplesOperatorTestHelper(IOManager ioManager) {
super(ioManager);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
index 7a675bc..1bb58b8 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/comm/SerializationDeserializationTest.java
@@ -48,8 +48,8 @@
public class SerializationDeserializationTest {
private static final Logger LOGGER = LogManager.getLogger();
- private static final String DBLP_FILE = "data" + File.separator + "device1" + File.separator + "data"
- + File.separator + "dblp.txt";
+ private static final String DBLP_FILE =
+ "data" + File.separator + "device1" + File.separator + "data" + File.separator + "dblp.txt";
private static class SerDeserRunner {
private final IHyracksTaskContext ctx;
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
index ae27ac9..0931501 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractIntegrationTest.java
@@ -106,7 +106,7 @@
ncConfig1.setClusterListenAddress("127.0.0.1");
ncConfig1.setDataListenAddress("127.0.0.1");
ncConfig1.setResultListenAddress("127.0.0.1");
- ncConfig1.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data", "device0") });
+ ncConfig1.setIODevices(new String[] { joinPath(System.getProperty("user.dir"), "target", "data", "device0") });
nc1 = new NodeControllerService(ncConfig1);
nc1.start();
@@ -116,7 +116,7 @@
ncConfig2.setClusterListenAddress("127.0.0.1");
ncConfig2.setDataListenAddress("127.0.0.1");
ncConfig2.setResultListenAddress("127.0.0.1");
- ncConfig2.setIODevices(new String [] { joinPath(System.getProperty("user.dir"), "target", "data", "device1") });
+ ncConfig2.setIODevices(new String[] { joinPath(System.getProperty("user.dir"), "target", "data", "device1") });
nc2 = new NodeControllerService(ncConfig2);
nc2.start();
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
index 7100895..58da8a2 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
@@ -245,8 +245,7 @@
@Override
public JobSubmissionStatus allocate(JobSpecification job) throws HyracksException {
return maxRAM > job.getRequiredClusterCapacity().getAggregatedMemoryByteSize()
- ? JobSubmissionStatus.EXECUTE
- : JobSubmissionStatus.QUEUE;
+ ? JobSubmissionStatus.EXECUTE : JobSubmissionStatus.QUEUE;
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
index 080746c..752c643 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/AggregationTest.java
@@ -73,9 +73,8 @@
*/
public class AggregationTest extends AbstractIntegrationTest {
- final IFileSplitProvider splitProvider = new ConstantFileSplitProvider(
- new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.002" + File.separator
- + "lineitem.tbl") });
+ final IFileSplitProvider splitProvider = new ConstantFileSplitProvider(new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.002" + File.separator + "lineitem.tbl") });
final RecordDescriptor desc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
@@ -110,8 +109,8 @@
public void singleKeySumPreClusterGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -150,8 +149,8 @@
public void singleKeySumExtGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -164,18 +163,19 @@
int tableSize = 8;
long fileSize = frameLimits * spec.getFrameSize();
- ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize,
- keyFields, frameLimits,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory(),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
- new FloatSumFieldAggregatorFactory(5, false) }),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
- new FloatSumFieldAggregatorFactory(3, false) }),
- outputRec, outputRec, new HashSpillableTableFactory(
- new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
+ ExternalGroupOperatorDescriptor grouper =
+ new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize, keyFields, frameLimits,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory(),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
+ new FloatSumFieldAggregatorFactory(5, false) }),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
+ new FloatSumFieldAggregatorFactory(3, false) }),
+ outputRec, outputRec, new HashSpillableTableFactory(
+ new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
@@ -199,8 +199,8 @@
public void singleKeyAvgPreClusterGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -239,8 +239,8 @@
public void singleKeyAvgExtGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -253,18 +253,19 @@
int tableSize = 8;
long fileSize = frameLimits * spec.getFrameSize();
- ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize,
- keyFields, frameLimits,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory(),
- new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
- new CountFieldAggregatorFactory(false), new AvgFieldGroupAggregatorFactory(1, false) }),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
- new AvgFieldMergeAggregatorFactory(3, false) }),
- outputRec, outputRec, new HashSpillableTableFactory(
- new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
+ ExternalGroupOperatorDescriptor grouper =
+ new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize, keyFields, frameLimits,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory(),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new CountFieldAggregatorFactory(false),
+ new AvgFieldGroupAggregatorFactory(1, false) }),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
+ new AvgFieldMergeAggregatorFactory(3, false) }),
+ outputRec, outputRec, new HashSpillableTableFactory(
+ new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
@@ -288,13 +289,13 @@
public void singleKeyMinMaxStringPreClusterGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
- RecordDescriptor outputRec = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor outputRec =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() });
int[] keyFields = new int[] { 0 };
@@ -328,13 +329,13 @@
public void singleKeyMinMaxStringExtGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
- RecordDescriptor outputRec = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor outputRec =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() });
int[] keyFields = new int[] { 0 };
@@ -342,18 +343,19 @@
int tableSize = 8;
long fileSize = frameLimits * spec.getFrameSize();
- ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize,
- keyFields, frameLimits,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory(),
- new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
- new MinMaxStringFieldAggregatorFactory(15, true, true) }),
- new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
- new MinMaxStringFieldAggregatorFactory(2, true, true) }),
- outputRec, outputRec, new HashSpillableTableFactory(
- new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
+ ExternalGroupOperatorDescriptor grouper =
+ new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize, keyFields, frameLimits,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory(),
+ new MultiFieldsAggregatorFactory(
+ new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
+ new MinMaxStringFieldAggregatorFactory(15, true, true) }),
+ new MultiFieldsAggregatorFactory(
+ new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
+ new MinMaxStringFieldAggregatorFactory(2, true, true) }),
+ outputRec, outputRec, new HashSpillableTableFactory(
+ new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, NC2_ID, NC1_ID);
@@ -377,8 +379,8 @@
public void multiKeySumPreClusterGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -419,8 +421,8 @@
public void multiKeySumExtGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -469,13 +471,13 @@
public void multiKeyAvgPreClusterGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
- RecordDescriptor outputRec = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor outputRec =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
IntegerSerializerDeserializer.INSTANCE, FloatSerializerDeserializer.INSTANCE });
@@ -513,13 +515,13 @@
public void multiKeyAvgExtGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
- RecordDescriptor outputRec = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor outputRec =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
IntegerSerializerDeserializer.INSTANCE, FloatSerializerDeserializer.INSTANCE });
@@ -566,8 +568,8 @@
public void multiKeyMinMaxStringPreClusterGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
@@ -609,8 +611,8 @@
public void multiKeyMinMaxStringExtGroupTest() throws Exception {
JobSpecification spec = new JobSpecification();
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory,
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, tupleParserFactory, desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CancelJobTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CancelJobTest.java
index 7eba9e7..ec3b8f1 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CancelJobTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CancelJobTest.java
@@ -230,8 +230,8 @@
FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(ASTERIX_IDS[0],
"data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor recordDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor recordDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
index c05b504..c28a5aa 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/CountOfCountsTest.java
@@ -64,14 +64,12 @@
FileSplit[] splits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt") };
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
- RecordDescriptor desc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
- spec,
- splitProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(
+ new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','), desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
InMemorySortOperatorDescriptor sorter = new InMemorySortOperatorDescriptor(spec, new int[] { 0 },
@@ -84,11 +82,13 @@
PreclusteredGroupOperatorDescriptor group = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 0 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc2);
+ new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+ desc2);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group, NC2_ID);
InMemorySortOperatorDescriptor sorter2 = new InMemorySortOperatorDescriptor(spec, new int[] { 1 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, desc2);
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
+ desc2);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter2, NC2_ID);
RecordDescriptor desc3 = new RecordDescriptor(new ISerializerDeserializer[] {
@@ -96,7 +96,8 @@
PreclusteredGroupOperatorDescriptor group2 = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 1 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc3);
+ new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+ desc3);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group2, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -106,18 +107,16 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID);
IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn1, csvScanner, 0, sorter, 0);
IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
spec.connect(conn2, sorter, 0, group, 0);
IConnectorDescriptor conn3 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 1 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 1 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn3, group, 0, sorter2, 0);
IConnectorDescriptor conn4 = new OneToOneConnectorDescriptor(spec);
@@ -136,14 +135,12 @@
FileSplit[] splits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt") };
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
- RecordDescriptor desc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
- spec,
- splitProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(
+ new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','), desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
InMemorySortOperatorDescriptor sorter = new InMemorySortOperatorDescriptor(spec, new int[] { 0 },
@@ -156,11 +153,13 @@
PreclusteredGroupOperatorDescriptor group = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 0 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc2);
+ new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+ desc2);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group, NC1_ID, NC2_ID, NC1_ID, NC2_ID);
InMemorySortOperatorDescriptor sorter2 = new InMemorySortOperatorDescriptor(spec, new int[] { 1 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, desc2);
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
+ desc2);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter2, NC1_ID, NC2_ID);
RecordDescriptor desc3 = new RecordDescriptor(new ISerializerDeserializer[] {
@@ -168,7 +167,8 @@
PreclusteredGroupOperatorDescriptor group2 = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 1 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc3);
+ new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+ desc3);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group2, NC1_ID, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -179,18 +179,16 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn1, csvScanner, 0, sorter, 0);
IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
spec.connect(conn2, sorter, 0, group, 0);
IConnectorDescriptor conn3 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 1 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 1 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn3, group, 0, sorter2, 0);
IConnectorDescriptor conn4 = new OneToOneConnectorDescriptor(spec);
@@ -209,14 +207,12 @@
FileSplit[] splits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt") };
IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
- RecordDescriptor desc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
- spec,
- splitProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(
+ new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','), desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID);
ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 3, new int[] { 0 },
@@ -229,11 +225,13 @@
PreclusteredGroupOperatorDescriptor group = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 0 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc2);
+ new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+ desc2);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group, NC1_ID, NC2_ID, NC1_ID, NC2_ID);
InMemorySortOperatorDescriptor sorter2 = new InMemorySortOperatorDescriptor(spec, new int[] { 1 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, desc2);
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
+ desc2);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter2, NC1_ID, NC2_ID);
RecordDescriptor desc3 = new RecordDescriptor(new ISerializerDeserializer[] {
@@ -241,7 +239,8 @@
PreclusteredGroupOperatorDescriptor group2 = new PreclusteredGroupOperatorDescriptor(spec, new int[] { 1 },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) },
new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }), desc3);
+ new IFieldAggregateDescriptorFactory[] { new CountFieldAggregatorFactory(true) }),
+ desc3);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, group2, NC1_ID, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -252,18 +251,16 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn1, csvScanner, 0, sorter, 0);
IConnectorDescriptor conn2 = new OneToOneConnectorDescriptor(spec);
spec.connect(conn2, sorter, 0, group, 0);
IConnectorDescriptor conn3 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 1 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 1 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn3, group, 0, sorter2, 0);
IConnectorDescriptor conn4 = new OneToOneConnectorDescriptor(spec);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java
index 553c5b5..40b6b27 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/DeployedJobSpecsTest.java
@@ -134,8 +134,10 @@
verify(nc2, Mockito.timeout(TIME_THRESHOLD).times(2)).checkForDuplicateDeployedJobSpec(any());
//confirm that both jobs are distributed
- Assert.assertTrue(nc1.getActivityClusterGraph(distributedId1) != null && nc2.getActivityClusterGraph(distributedId1) != null);
- Assert.assertTrue(nc1.getActivityClusterGraph(distributedId2) != null && nc2.getActivityClusterGraph(distributedId2) != null);
+ Assert.assertTrue(nc1.getActivityClusterGraph(distributedId1) != null
+ && nc2.getActivityClusterGraph(distributedId1) != null);
+ Assert.assertTrue(nc1.getActivityClusterGraph(distributedId2) != null
+ && nc2.getActivityClusterGraph(distributedId2) != null);
Assert.assertTrue(cc.getDeployedJobSpecStore().getDeployedJobSpecDescriptor(distributedId1) != null);
Assert.assertTrue(cc.getDeployedJobSpecStore().getDeployedJobSpecDescriptor(distributedId2) != null);
@@ -157,7 +159,8 @@
verify(nc2, Mockito.timeout(TIME_THRESHOLD).times(1)).removeActivityClusterGraph(any());
//confirm the first job is destroyed
- Assert.assertTrue(nc1.getActivityClusterGraph(distributedId1) == null && nc2.getActivityClusterGraph(distributedId1) == null);
+ Assert.assertTrue(nc1.getActivityClusterGraph(distributedId1) == null
+ && nc2.getActivityClusterGraph(distributedId1) == null);
cc.getDeployedJobSpecStore().checkForExistingDeployedJobSpecDescriptor(distributedId1);
//run the second job
@@ -187,7 +190,8 @@
verify(nc2, Mockito.timeout(TIME_THRESHOLD).times(2)).removeActivityClusterGraph(any());
//confirm the second job is destroyed
- Assert.assertTrue(nc1.getActivityClusterGraph(distributedId2) == null && nc2.getActivityClusterGraph(distributedId2) == null);
+ Assert.assertTrue(nc1.getActivityClusterGraph(distributedId2) == null
+ && nc2.getActivityClusterGraph(distributedId2) == null);
cc.getDeployedJobSpecStore().checkForExistingDeployedJobSpecDescriptor(distributedId2);
//run the second job 100 times in parallel
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
index b693b09..9e795bf 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/HeapSortMergeTest.java
@@ -81,11 +81,13 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
int outputLimit = 5; // larger than the total record numbers.
- TopKSorterOperatorDescriptor sorter = new TopKSorterOperatorDescriptor(spec, 4, outputLimit, new int[] { 1, 0 },
- (INormalizedKeyComputerFactory) null,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- ordersDesc);
+ TopKSorterOperatorDescriptor sorter =
+ new TopKSorterOperatorDescriptor(spec, 4, outputLimit, new int[] { 1, 0 },
+ (INormalizedKeyComputerFactory) null,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
@@ -145,11 +147,13 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
int outputLimit = 20;
- TopKSorterOperatorDescriptor sorter = new TopKSorterOperatorDescriptor(spec, 4, outputLimit, new int[] { 1, 0 },
- (INormalizedKeyComputerFactory) null,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- ordersDesc);
+ TopKSorterOperatorDescriptor sorter =
+ new TopKSorterOperatorDescriptor(spec, 4, outputLimit, new int[] { 1, 0 },
+ (INormalizedKeyComputerFactory) null,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
LimitOperatorDescriptor filter = new LimitOperatorDescriptor(spec, ordersDesc, outputLimit);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
index 67845c0..49dee84 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/LocalityAwareConnectorTest.java
@@ -125,18 +125,20 @@
int[] keyFields = new int[] { 0 };
int tableSize = 8;
- ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize,
- keyFields, fileSize / spec.getFrameSize() + 1,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory(),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
- new FloatSumFieldAggregatorFactory(5, false) }),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
- new FloatSumFieldAggregatorFactory(3, false) }),
- outputRec, outputRec, new HashSpillableTableFactory(
- new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
+ ExternalGroupOperatorDescriptor grouper =
+ new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize, keyFields,
+ fileSize / spec.getFrameSize() + 1,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory(),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
+ new FloatSumFieldAggregatorFactory(5, false) }),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
+ new FloatSumFieldAggregatorFactory(3, false) }),
+ outputRec, outputRec, new HashSpillableTableFactory(
+ new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, "asterix-005", "asterix-006");
@@ -190,18 +192,20 @@
int[] keyFields = new int[] { 0 };
int tableSize = 8;
- ExternalGroupOperatorDescriptor grouper = new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize,
- keyFields, fileSize / spec.getFrameSize() + 1,
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory(),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
- new FloatSumFieldAggregatorFactory(5, false) }),
- new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
- new FloatSumFieldAggregatorFactory(3, false) }),
- outputRec, outputRec, new HashSpillableTableFactory(
- new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
+ ExternalGroupOperatorDescriptor grouper =
+ new ExternalGroupOperatorDescriptor(spec, tableSize, fileSize, keyFields,
+ fileSize / spec.getFrameSize() + 1,
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory(),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(3, false),
+ new FloatSumFieldAggregatorFactory(5, false) }),
+ new MultiFieldsAggregatorFactory(new IFieldAggregateDescriptorFactory[] {
+ new IntSumFieldAggregatorFactory(1, false), new IntSumFieldAggregatorFactory(2, false),
+ new FloatSumFieldAggregatorFactory(3, false) }),
+ outputRec, outputRec, new HashSpillableTableFactory(
+ new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE }));
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, grouper, "asterix-005", "asterix-006");
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java
index d7d4219..09629b2 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ReplicateOperatorTest.java
@@ -79,11 +79,11 @@
DelimitedDataTupleParserFactory stringParser = new DelimitedDataTupleParserFactory(
new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, '\u0000');
- RecordDescriptor stringRec = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(), });
+ RecordDescriptor stringRec =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(), });
- FileScanOperatorDescriptor scanOp = new FileScanOperatorDescriptor(spec, new ConstantFileSplitProvider(
- inputSplits), stringParser, stringRec);
+ FileScanOperatorDescriptor scanOp = new FileScanOperatorDescriptor(spec,
+ new ConstantFileSplitProvider(inputSplits), stringParser, stringRec);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, scanOp, locations);
ReplicateOperatorDescriptor replicateOp = new ReplicateOperatorDescriptor(spec, stringRec, outputArity);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
index 06d7b04..75ba33f 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/ScanPrintTest.java
@@ -54,19 +54,16 @@
public void scanPrint01() throws Exception {
JobSpecification spec = new JobSpecification();
- IFileSplitProvider splitProvider = new ConstantFileSplitProvider(new FileSplit[] {
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt"),
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "nc1" + File.separator + "words.txt") });
+ IFileSplitProvider splitProvider = new ConstantFileSplitProvider(
+ new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt"),
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "nc1" + File.separator + "words.txt") });
- RecordDescriptor desc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- FileScanOperatorDescriptor csvScanner = new FileScanOperatorDescriptor(
- spec,
- splitProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE },
- ','),
- desc);
+ FileScanOperatorDescriptor csvScanner =
+ new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(
+ new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','), desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner, NC2_ID, NC1_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -87,22 +84,23 @@
public void scanPrint02() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -113,9 +111,8 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID);
IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(conn1, ordScanner, 0, printer, 0);
spec.addRoot(printer);
@@ -126,22 +123,23 @@
public void scanPrint03() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
IntegerParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -152,9 +150,8 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC2_ID);
IConnectorDescriptor conn1 = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(IntegerPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(IntegerPointable.FACTORY) }));
spec.connect(conn1, ordScanner, 0, printer, 0);
spec.addRoot(printer);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
index df9c0d7..315b74c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/SortMergeTest.java
@@ -56,24 +56,25 @@
JobSpecification spec = new JobSpecification();
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
InMemorySortOperatorDescriptor sorter = new InMemorySortOperatorDescriptor(spec, new int[] { 1 },
@@ -90,13 +91,13 @@
spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
- spec.connect(
- new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(
- new int[] { 1 }, new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }), new int[] { 1 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
- .of(UTF8StringPointable.FACTORY) }, new UTF8StringNormalizedKeyComputerFactory()),
- sorter, 0, printer, 0);
+ spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec,
+ new FieldHashPartitionComputerFactory(new int[] { 1 },
+ new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
+ new int[] { 1 },
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
runTest(spec);
}
@@ -106,29 +107,33 @@
JobSpecification spec = new JobSpecification();
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 4, new int[] { 1, 0 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
+ ExternalSortOperatorDescriptor sorter =
+ new ExternalSortOperatorDescriptor(spec, 4, new int[] { 1, 0 },
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -140,15 +145,14 @@
spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
- spec.connect(
- new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(new int[] {
- 1, 0 }, new IBinaryHashFunctionFactory[] {
- PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }), new int[] { 1, 0 },
- new IBinaryComparatorFactory[] {
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
+ spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(
+ new int[] { 1, 0 },
+ new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
+ new int[] { 1, 0 },
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
runTest(spec);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java
index d6f39ad..289f8ae 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOptimizedHybridHashJoinTest.java
@@ -59,23 +59,23 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- static RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ static RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- static RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ static RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
static IValueParserFactory[] custValueParserFactories = new IValueParserFactory[custDesc.getFieldCount()];
static IValueParserFactory[] orderValueParserFactories = new IValueParserFactory[ordersDesc.getFieldCount()];
@@ -86,9 +86,8 @@
}
private IOperatorDescriptor getPrinter(JobSpecification spec, String path) {
- IFileSplitProvider outputSplitProvider = new ConstantFileSplitProvider(
- new FileSplit[] {
- new ManagedFileSplit(NC1_ID, path) });
+ IFileSplitProvider outputSplitProvider =
+ new ConstantFileSplitProvider(new FileSplit[] { new ManagedFileSplit(NC1_ID, path) });
return DEBUG ? new PlainFileWriterOperatorDescriptor(spec, outputSplitProvider, "|")
: new NullSinkOperatorDescriptor(spec);
@@ -97,12 +96,12 @@
@Test
public void customerOrderCIDHybridHashJoin_Case1() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer4.tbl") };
+ FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer4.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders4.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders4.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
@@ -117,8 +116,8 @@
1.2, new int[] { 0 }, new int[] { 1 },
new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- custOrderJoinDesc, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
+ custOrderJoinDesc,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
null);
@@ -146,12 +145,12 @@
public void customerOrderCIDHybridHashJoin_Case2() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer3.tbl") };
+ FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer3.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders4.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders4.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
@@ -167,8 +166,8 @@
1.2, new int[] { 0 }, new int[] { 1 },
new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- custOrderJoinDesc, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
+ custOrderJoinDesc,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
null);
@@ -197,12 +196,12 @@
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer3.tbl") };
+ FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer3.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders1.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders1.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
@@ -218,8 +217,8 @@
1.2, new int[] { 0 }, new int[] { 1 },
new IBinaryHashFunctionFamily[] { UTF8StringBinaryHashFunctionFamily.INSTANCE },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- custOrderJoinDesc, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
+ custOrderJoinDesc,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 0, 1),
new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
null);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
index 2c055c2..816f3fa 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
@@ -72,8 +72,8 @@
public void customerOrderCIDJoin() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl") };
+ FileSplit[] custSplits = new FileSplit[] {
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -81,18 +81,18 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -111,14 +111,14 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 1 },
@@ -152,8 +152,8 @@
public void customerOrderCIDHybridHashJoin() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl") };
+ FileSplit[] custSplits = new FileSplit[] {
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -161,51 +161,48 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
- HybridHashJoinOperatorDescriptor join = new HybridHashJoinOperatorDescriptor(
- spec,
- 32,
- 20,
- 200,
- 1.2,
- new int[] { 1 },
- new int[] { 0 },
+ HybridHashJoinOperatorDescriptor join = new HybridHashJoinOperatorDescriptor(spec, 32, 20, 200, 1.2,
+ new int[] { 1 }, new int[] { 0 },
new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
custOrderJoinDesc, null, false, null);
@@ -235,8 +232,8 @@
public void customerOrderCIDInMemoryHashLeftOuterJoin() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl") };
+ FileSplit[] custSplits = new FileSplit[] {
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -244,18 +241,18 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -274,14 +271,14 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[ordersDesc.getFieldCount()];
@@ -320,8 +317,8 @@
public void customerOrderCIDHybridHashLeftOuterJoin() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl") };
+ FileSplit[] custSplits = new FileSplit[] {
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -329,18 +326,18 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -359,14 +356,14 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[ordersDesc.getFieldCount()];
@@ -406,10 +403,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -418,20 +415,20 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -450,14 +447,14 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 1 },
@@ -475,15 +472,13 @@
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
IConnectorDescriptor ordJoinConn = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 1 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 1 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(ordJoinConn, ordScanner, 0, join, 0);
IConnectorDescriptor custJoinConn = new MToNPartitioningConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) }));
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, new IBinaryHashFunctionFactory[] {
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }));
spec.connect(custJoinConn, custScanner, 0, join, 1);
IConnectorDescriptor joinPrinterConn = new MToNBroadcastConnectorDescriptor(spec);
@@ -498,10 +493,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -510,20 +505,20 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -542,14 +537,14 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
HybridHashJoinOperatorDescriptor join = new HybridHashJoinOperatorDescriptor(spec, 5, 20, 100, 1.2,
@@ -588,10 +583,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -600,20 +595,20 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -632,20 +627,19 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
InMemoryHashJoinOperatorDescriptor join = new InMemoryHashJoinOperatorDescriptor(spec, new int[] { 1 },
new int[] { 0 },
- new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory
- .of(UTF8StringPointable.FACTORY) },
+ new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) },
new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
custOrderJoinDesc, 128, null, 128);
PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 2);
@@ -679,10 +673,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -691,20 +685,20 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -723,14 +717,14 @@
ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(
- new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE },
- '|'),
- custDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
MaterializingOperatorDescriptor ordMat = new MaterializingOperatorDescriptor(spec, ordersDesc);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
index dc5d0bc..c2b3263 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/TPCHCustomerOrderNestedLoopJoinTest.java
@@ -103,8 +103,8 @@
int fEnd1 = accessor1.getFieldEndOffset(tIndex1, field1);
int fLen1 = fEnd1 - fStart1;
- int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0, accessor1
- .getBuffer().array(), fStart1 + fStartOffset1, fLen1);
+ int c = bComparator.compare(accessor0.getBuffer().array(), fStart0 + fStartOffset0, fLen0,
+ accessor1.getBuffer().array(), fStart1 + fStartOffset1, fLen1);
if (c != 0) {
return c;
}
@@ -127,8 +127,8 @@
public void customerOrderCIDJoin() throws Exception {
JobSpecification spec = new JobSpecification();
- FileSplit[] custSplits = new FileSplit[] { new ManagedFileSplit(NC1_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "customer.tbl") };
+ FileSplit[] custSplits = new FileSplit[] {
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "customer.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -136,46 +136,49 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- FileSplit[] ordersSplits = new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator
- + "tpch0.001" + File.separator + "orders.tbl") };
+ FileSplit[] ordersSplits = new FileSplit[] {
+ new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID);
- NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 4, false,
- null);
+ NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
+ custOrderJoinDesc, 4, false, null);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -203,10 +206,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -215,48 +218,51 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
- NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 5, false,
- null);
+ NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
+ custOrderJoinDesc, 5, false, null);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
@@ -284,10 +290,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -296,48 +302,51 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
- NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 6, false,
- null);
+ NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
+ custOrderJoinDesc, 6, false, null);
PartitionConstraintHelper.addPartitionCountConstraint(spec, join, 2);
ResultSetId rsId = new ResultSetId(1);
@@ -365,10 +374,10 @@
JobSpecification spec = new JobSpecification();
FileSplit[] custSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "customer-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "customer-part2.tbl") };
IFileSplitProvider custSplitsProvider = new ConstantFileSplitProvider(custSplits);
RecordDescriptor custDesc = new RecordDescriptor(new ISerializerDeserializer[] {
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
@@ -377,43 +386,46 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileSplit[] ordersSplits = new FileSplit[] {
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part1.tbl"),
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator
- + "orders-part2.tbl") };
+ new ManagedFileSplit(NC1_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
+ new ManagedFileSplit(NC2_ID,
+ "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitsProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitsProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
- FileScanOperatorDescriptor custScanner = new FileScanOperatorDescriptor(spec, custSplitsProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE }, '|'), custDesc);
+ FileScanOperatorDescriptor custScanner =
+ new FileScanOperatorDescriptor(spec, custSplitsProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] {
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ custDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, custScanner, NC1_ID, NC2_ID);
IMissingWriterFactory[] nonMatchWriterFactories = new IMissingWriterFactory[ordersDesc.getFieldCount()];
@@ -421,9 +433,9 @@
nonMatchWriterFactories[j] = NoopMissingWriterFactory.INSTANCE;
}
- NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec, new JoinComparatorFactory(
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0), custOrderJoinDesc, 5, true,
- nonMatchWriterFactories);
+ NestedLoopJoinOperatorDescriptor join = new NestedLoopJoinOperatorDescriptor(spec,
+ new JoinComparatorFactory(PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY), 1, 0),
+ custOrderJoinDesc, 5, true, nonMatchWriterFactories);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, join, NC1_ID, NC2_ID);
ResultSetId rsId = new ResultSetId(1);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/UnionTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/UnionTest.java
index e4d6398..81a71eb 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/UnionTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/UnionTest.java
@@ -51,25 +51,21 @@
public static JobSpecification createUnionJobSpec() throws Exception {
JobSpecification spec = new JobSpecification();
- IFileSplitProvider splitProvider = new ConstantFileSplitProvider(new FileSplit[] {
- new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt"),
- new ManagedFileSplit(NC1_ID, "data" + File.separator + "nc1" + File.separator + "words.txt") });
+ IFileSplitProvider splitProvider = new ConstantFileSplitProvider(
+ new FileSplit[] { new ManagedFileSplit(NC2_ID, "data" + File.separator + "words.txt"),
+ new ManagedFileSplit(NC1_ID, "data" + File.separator + "nc1" + File.separator + "words.txt") });
- RecordDescriptor desc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor desc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- FileScanOperatorDescriptor csvScanner01 = new FileScanOperatorDescriptor(
- spec,
- splitProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
- desc);
+ FileScanOperatorDescriptor csvScanner01 =
+ new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(
+ new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','), desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner01, NC2_ID, NC1_ID);
- FileScanOperatorDescriptor csvScanner02 = new FileScanOperatorDescriptor(
- spec,
- splitProvider,
- new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','),
- desc);
+ FileScanOperatorDescriptor csvScanner02 =
+ new FileScanOperatorDescriptor(spec, splitProvider, new DelimitedDataTupleParserFactory(
+ new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE }, ','), desc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, csvScanner02, NC2_ID, NC1_ID);
UnionAllOperatorDescriptor unionAll = new UnionAllOperatorDescriptor(spec, 2, desc);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
index 29e1d6e..9761f4d 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/integration/VSizeFrameSortMergeTest.java
@@ -56,12 +56,12 @@
new ManagedFileSplit(NC1_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders-part1.tbl"),
new ManagedFileSplit(NC2_ID, "data" + File.separator + "tpch0.001" + File.separator + "orders-part2.tbl") };
IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
- RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
@Test
public void sortNormalMergeTest() throws Exception {
@@ -84,34 +84,37 @@
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'),
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
spec.setFrameSize(frameSize);
- ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, frameLimit, new int[] { 1, 0 },
- new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
+ ExternalSortOperatorDescriptor sorter =
+ new ExternalSortOperatorDescriptor(spec, frameLimit, new int[] { 1, 0 },
+ new IBinaryComparatorFactory[] {
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ ordersDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
String path = getClass().getSimpleName() + aInteger.getAndIncrement() + ".tmp";
- IFileSplitProvider outputSplitProvider = new ConstantFileSplitProvider(
- new FileSplit[] { new ManagedFileSplit(NC1_ID, path) });
+ IFileSplitProvider outputSplitProvider =
+ new ConstantFileSplitProvider(new FileSplit[] { new ManagedFileSplit(NC1_ID, path) });
IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outputSplitProvider, "|");
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
- spec.connect(
- new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(new int[] {
- 1, 0 }, new IBinaryHashFunctionFactory[] {
- PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }), new int[] { 1, 0 },
- new IBinaryComparatorFactory[] {
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
- new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
+ spec.connect(new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(
+ new int[] { 1, 0 },
+ new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }),
+ new int[] { 1, 0 },
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+ new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
spec.addRoot(printer);
runTest(spec);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/ErrorReportingTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/ErrorReportingTest.java
index ef9e4b6..4d3215d 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/ErrorReportingTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/ErrorReportingTest.java
@@ -139,7 +139,7 @@
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new IOperatorNodePushable() {
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/SuperActivityRewritingTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/SuperActivityRewritingTest.java
index 8b0b8a0..25c9d5c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/SuperActivityRewritingTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/rewriting/SuperActivityRewritingTest.java
@@ -123,7 +123,7 @@
@Override
public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
return new IOperatorNodePushable() {
private CountDownLatch allOpenedSignal = new CountDownLatch(3);
private Set<Long> threads = new HashSet<>();
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
index b0c210f..b55b64e 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/AbstractExternalGroupbyTest.java
@@ -58,50 +58,39 @@
public abstract class AbstractExternalGroupbyTest {
- ISerializerDeserializer[] inFields = new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE,
- new UTF8StringSerializerDeserializer(),
- };
+ ISerializerDeserializer[] inFields = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ new UTF8StringSerializerDeserializer(), };
- ISerializerDeserializer[] aggrFields = new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), // key
- IntegerSerializerDeserializer.INSTANCE, // sum
- IntegerSerializerDeserializer.INSTANCE, // count
- FloatSerializerDeserializer.INSTANCE, // avg
+ ISerializerDeserializer[] aggrFields = new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(), // key
+ IntegerSerializerDeserializer.INSTANCE, // sum
+ IntegerSerializerDeserializer.INSTANCE, // count
+ FloatSerializerDeserializer.INSTANCE, // avg
};
RecordDescriptor inRecordDesc = new RecordDescriptor(inFields);
RecordDescriptor outputRec = new RecordDescriptor(aggrFields);
- IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[] {
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
+ IBinaryComparatorFactory[] comparatorFactories =
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
INormalizedKeyComputerFactory normalizedKeyComputerFactory = new UTF8StringNormalizedKeyComputerFactory();
IAggregatorDescriptorFactory partialAggrInPlace = new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(0, false),
- new CountFieldAggregatorFactory(false),
- new AvgFieldGroupAggregatorFactory(0, false) });
+ new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(0, false),
+ new CountFieldAggregatorFactory(false), new AvgFieldGroupAggregatorFactory(0, false) });
IAggregatorDescriptorFactory finalAggrInPlace = new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, false),
- new IntSumFieldAggregatorFactory(2, false),
- new AvgFieldMergeAggregatorFactory(3, false) });
+ new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, false),
+ new IntSumFieldAggregatorFactory(2, false), new AvgFieldMergeAggregatorFactory(3, false) });
IAggregatorDescriptorFactory partialAggrInState = new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(0, true),
- new CountFieldAggregatorFactory(true),
- new AvgFieldGroupAggregatorFactory(0, true) });
+ new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(0, true),
+ new CountFieldAggregatorFactory(true), new AvgFieldGroupAggregatorFactory(0, true) });
IAggregatorDescriptorFactory finalAggrInState = new MultiFieldsAggregatorFactory(
- new IFieldAggregateDescriptorFactory[] {
- new IntSumFieldAggregatorFactory(1, true),
- new IntSumFieldAggregatorFactory(2, true),
- new AvgFieldMergeAggregatorFactory(3, true) });
+ new IFieldAggregateDescriptorFactory[] { new IntSumFieldAggregatorFactory(1, true),
+ new IntSumFieldAggregatorFactory(2, true), new AvgFieldMergeAggregatorFactory(3, true) });
int[] keyFields = new int[] { 1 };
int[] keyFieldsAfterPartial = new int[] { 0 };
@@ -213,17 +202,15 @@
protected abstract IOperatorNodePushable getMerger();
- private void testBuildAndMerge(int tableSize, int numFrames, int frameSize, int minDataSize,
- int minRecordSize, int maxRecordSize,
- Map<Integer, String> specialData)
- throws HyracksDataException {
+ private void testBuildAndMerge(int tableSize, int numFrames, int frameSize, int minDataSize, int minRecordSize,
+ int maxRecordSize, Map<Integer, String> specialData) throws HyracksDataException {
IHyracksTaskContext ctx = TestUtils.create(frameSize);
initial(ctx, tableSize, numFrames);
ArrayList<IFrame> input = new ArrayList<>();
Map<Integer, String> keyValueMap = new HashMap<>();
- AbstractRunGeneratorTest
- .prepareData(ctx, input, minDataSize, minRecordSize, maxRecordSize, specialData, keyValueMap);
+ AbstractRunGeneratorTest.prepareData(ctx, input, minDataSize, minRecordSize, maxRecordSize, specialData,
+ keyValueMap);
ResultValidateWriter writer = new ResultValidateWriter(keyValueMap);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/IntersectOperatorDescriptorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/IntersectOperatorDescriptorTest.java
index 6729713..bd51619 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/IntersectOperatorDescriptorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/IntersectOperatorDescriptorTest.java
@@ -51,9 +51,9 @@
public class IntersectOperatorDescriptorTest {
- IOperatorDescriptorRegistry mockRegistry = when(
- mock(IOperatorDescriptorRegistry.class).createOperatorDescriptorId(any()))
- .thenReturn(new OperatorDescriptorId(1)).getMock();
+ IOperatorDescriptorRegistry mockRegistry =
+ when(mock(IOperatorDescriptorRegistry.class).createOperatorDescriptorId(any()))
+ .thenReturn(new OperatorDescriptorId(1)).getMock();
MultiThreadTaskEmulator multiThreadTaskEmulator = new MultiThreadTaskEmulator();
InputFrameGenerator frameGenerator = new InputFrameGenerator(256);
IHyracksTaskContext ctx = TestUtils.create(256);
@@ -72,10 +72,9 @@
inputRecordDescriptor = new RecordDescriptor[nInputs];
normalizedKeyFactory = null;
- comparatorFactory = new IBinaryComparatorFactory[] {
- PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY),
- PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY)
- };
+ comparatorFactory =
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
for (int i = 0; i < nInputs; i++) {
compareFields[i] = new int[nProjectFields];
@@ -84,17 +83,13 @@
}
}
for (int i = 0; i < nInputs; i++) {
- inputRecordDescriptor[i] = new RecordDescriptor(new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE
- });
+ inputRecordDescriptor[i] =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
}
outRecordDescriptor = new RecordDescriptor(new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE
- });
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
}
@Before
@@ -208,8 +203,8 @@
generateRecordStream(answer, outRecordDescriptor, 0, 100, 1);
}
- private void generateRecordStream(List<Object[]> inputs, RecordDescriptor recordDesc,
- int start, int end, int step) {
+ private void generateRecordStream(List<Object[]> inputs, RecordDescriptor recordDesc, int start, int end,
+ int step) {
for (int i = start; i < end; i += step) {
Object[] obj = new Object[recordDesc.getFieldCount()];
for (int f = 0; f < recordDesc.getFieldCount(); f++) {
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/TopKRunGeneratorTest.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/TopKRunGeneratorTest.java
index b2a8323..016fe0b 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/TopKRunGeneratorTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/unit/TopKRunGeneratorTest.java
@@ -137,8 +137,8 @@
public void testHybridTopKWithTwoNormalizedKeys() throws HyracksDataException {
int topK = SORT_FRAME_LIMIT;
IHyracksTaskContext ctx = AbstractRunGeneratorTest.testUtils.create(PAGE_SIZE);
- AbstractSortRunGenerator sorter = new HybridTopKSortRunGenerator(
- ctx, SORT_FRAME_LIMIT, topK, SortFields, new INormalizedKeyComputerFactory[] {
+ AbstractSortRunGenerator sorter = new HybridTopKSortRunGenerator(ctx,
+ SORT_FRAME_LIMIT, topK, SortFields, new INormalizedKeyComputerFactory[] {
new IntegerNormalizedKeyComputerFactory(), new UTF8StringNormalizedKeyComputerFactory() },
ComparatorFactories, RecordDesc);
testInMemoryOnly(ctx, topK, ORDER.REVERSE, sorter);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/OutputFrameVerifier.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/OutputFrameVerifier.java
index 77b6913..13ed058 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/OutputFrameVerifier.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/util/OutputFrameVerifier.java
@@ -56,8 +56,7 @@
Object[] objects = new Object[inputRecordDescriptor.getFieldCount()];
for (int fid = 0; fid < inputRecordDescriptor.getFieldCount(); fid++) {
ByteArrayInputStream bais = new ByteArrayInputStream(frameAccessor.getBuffer().array(),
- frameAccessor.getAbsoluteFieldStartOffset(tid, fid),
- frameAccessor.getFieldLength(tid, fid));
+ frameAccessor.getAbsoluteFieldStartOffset(tid, fid), frameAccessor.getFieldLength(tid, fid));
DataInputStream dis = new DataInputStream(bais);
objects[fid] = inputRecordDescriptor.getFields()[fid].deserialize(dis);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java
index 49b2779..7c85d5a 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-shutdown-test/src/test/java/org/apache/hyracks/examples/shutdown/test/ClusterShutdownIT.java
@@ -33,6 +33,7 @@
private static Logger LOGGER = LogManager.getLogger();
@Rule
public ExpectedException closeTwice = ExpectedException.none();
+
@Test
public void runShutdown() throws Exception {
IHyracksClientConnection hcc = new HyracksConnection("localhost", 1098);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Common.java b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Common.java
index 3c0ecfd..2844d02 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Common.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Common.java
@@ -43,22 +43,22 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- static RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
- static RecordDescriptor custOrderJoinDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() });
+ static RecordDescriptor ordersDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
+ static RecordDescriptor custOrderJoinDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
static RecordDescriptor lineitemDesc = new RecordDescriptor(new ISerializerDeserializer[] {
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
@@ -70,25 +70,20 @@
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() });
- static IValueParserFactory[] lineitemParserFactories = new IValueParserFactory[] {
- IntegerParserFactory.INSTANCE, IntegerParserFactory.INSTANCE,
- IntegerParserFactory.INSTANCE, IntegerParserFactory.INSTANCE,
- IntegerParserFactory.INSTANCE, FloatParserFactory.INSTANCE,
- FloatParserFactory.INSTANCE, FloatParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, };
+ static IValueParserFactory[] lineitemParserFactories = new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
+ IntegerParserFactory.INSTANCE, IntegerParserFactory.INSTANCE, IntegerParserFactory.INSTANCE,
+ IntegerParserFactory.INSTANCE, FloatParserFactory.INSTANCE, FloatParserFactory.INSTANCE,
+ FloatParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, };
static IValueParserFactory[] custParserFactories = new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE };
static IValueParserFactory[] orderParserFactories = new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
- UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE };
static FileSplit[] parseFileSplits(String fileSplits) {
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
index 42fe8c9..80c4f88 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Groupby.java
@@ -134,8 +134,8 @@
createPartitionConstraint(spec, fileScanner, inSplits);
// Output: each unique string with an integer count
- RecordDescriptor outDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ RecordDescriptor outDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
// IntegerSerializerDeserializer.INSTANCE,
IntegerSerializerDeserializer.INSTANCE });
@@ -187,9 +187,9 @@
spec.connect(scanGroupConnDef2, fileScanner, 0, grouper, 0);
IFileSplitProvider outSplitProvider = new ConstantFileSplitProvider(outSplits);
- AbstractSingleActivityOperatorDescriptor writer = outPlain ? new PlainFileWriterOperatorDescriptor(spec,
- outSplitProvider, "|")
- : new FrameFileWriterOperatorDescriptor(spec, outSplitProvider);
+ AbstractSingleActivityOperatorDescriptor writer =
+ outPlain ? new PlainFileWriterOperatorDescriptor(spec, outSplitProvider, "|")
+ : new FrameFileWriterOperatorDescriptor(spec, outSplitProvider);
createPartitionConstraint(spec, writer, outSplits);
IConnectorDescriptor groupOutConn = new OneToOneConnectorDescriptor(spec);
spec.connect(groupOutConn, grouper, 0, writer, 0);
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
index 7e56004..5043974 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/org/apache/hyracks/examples/tpch/client/Sort.java
@@ -87,13 +87,13 @@
}
static int[] SortFields = new int[] { 1, 0 };
- static IBinaryComparatorFactory[] SortFieldsComparatorFactories = new IBinaryComparatorFactory[] {
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
+ static IBinaryComparatorFactory[] SortFieldsComparatorFactories =
+ new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
- static IBinaryHashFunctionFactory[] orderBinaryHashFunctionFactories = new IBinaryHashFunctionFactory[] {
- PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
- PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) };
+ static IBinaryHashFunctionFactory[] orderBinaryHashFunctionFactories =
+ new IBinaryHashFunctionFactory[] { PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+ PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) };
public static void main(String[] args) throws Exception {
Options options = new Options();
@@ -107,8 +107,8 @@
IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
JobSpecification job = createJob(parseFileSplits(options.inFileOrderSplits),
- parseFileSplits(options.outFileSplits),
- options.memBufferAlg, options.frameLimit, options.frameSize, options.topK, options.usingHeapSorter);
+ parseFileSplits(options.outFileSplits), options.memBufferAlg, options.frameLimit, options.frameSize,
+ options.topK, options.usingHeapSorter);
long start = System.currentTimeMillis();
JobId jobId = hcc.startJob(job,
@@ -156,8 +156,8 @@
spec.connect(
new MToNPartitioningMergingConnectorDescriptor(spec,
- new FieldHashPartitionComputerFactory(SortFields, orderBinaryHashFunctionFactories),
- SortFields, SortFieldsComparatorFactories, new UTF8StringNormalizedKeyComputerFactory()),
+ new FieldHashPartitionComputerFactory(SortFields, orderBinaryHashFunctionFactories), SortFields,
+ SortFieldsComparatorFactories, new UTF8StringNormalizedKeyComputerFactory()),
sorter, 0, printer, 0);
spec.addRoot(printer);
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/INcCollectionBuilder.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/INcCollectionBuilder.java
index 9010378..02c5fb3 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/INcCollectionBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/INcCollectionBuilder.java
@@ -30,7 +30,6 @@
*/
public interface INcCollectionBuilder {
- public INcCollection build(Map<String, NodeControllerInfo> ncNameToNcInfos,
- Map<String, List<String>> ipToNcMapping, Map<String, Integer> ncNameToIndex, String[] NCs, int[] workloads,
- int slotLimit);
+ public INcCollection build(Map<String, NodeControllerInfo> ncNameToNcInfos, Map<String, List<String>> ipToNcMapping,
+ Map<String, Integer> ncNameToIndex, String[] NCs, int[] workloads, int slotLimit);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/ITupleWriterFactory.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/ITupleWriterFactory.java
index 6d3a082..57dadb0 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/ITupleWriterFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/api/ITupleWriterFactory.java
@@ -34,6 +34,7 @@
* the IHyracksTaskContext
* @return a tuple writer instance
*/
- public ITupleWriter getTupleWriter(IHyracksTaskContext ctx, int partition, int nPartition) throws HyracksDataException;
+ public ITupleWriter getTupleWriter(IHyracksTaskContext ctx, int partition, int nPartition)
+ throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java
index 8357ae0..021efca 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/dataflow/HDFSReadOperatorDescriptor.java
@@ -90,7 +90,7 @@
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
final InputSplit[] inputSplits = splitsFactory.getSplits();
return new AbstractUnaryOutputSourceOperatorNodePushable() {
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/IPProximityNcCollectionBuilder.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/IPProximityNcCollectionBuilder.java
index 6d7d63b..c53a779 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/IPProximityNcCollectionBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/IPProximityNcCollectionBuilder.java
@@ -46,8 +46,7 @@
byte[] rawip;
try {
rawip = ncNameToNcInfos.get(NCs[i]).getNetworkAddress().lookupIpAddress();
- }
- catch (UnknownHostException e) {
+ } catch (UnknownHostException e) {
// QQQ Should probably have a neater solution than this
throw new RuntimeException(e);
}
@@ -122,8 +121,8 @@
/**
* Update the entry of the selected NC
*/
- List<String> dataLocations = ipToNcMapping.get(InetAddress.getByAddress(
- currentCandidateIp.getBytes()).getHostAddress());
+ List<String> dataLocations = ipToNcMapping
+ .get(InetAddress.getByAddress(currentCandidateIp.getBytes()).getHostAddress());
for (String nc : dataLocations) {
int ncIndex = ncNameToIndex.get(nc);
if (workloads[ncIndex] < slotLimit) {
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
index c9bf547..63be8c5 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/RackAwareNcCollectionBuilder.java
@@ -52,8 +52,8 @@
final Map<List<Integer>, List<String>> pathToNCs = new HashMap<List<Integer>, List<String>>();
for (String NC : NCs) {
List<Integer> path = new ArrayList<>();
- String ipAddress = InetAddress.getByAddress(
- ncNameToNcInfos.get(NC).getNetworkAddress().lookupIpAddress()).getHostAddress();
+ String ipAddress = InetAddress
+ .getByAddress(ncNameToNcInfos.get(NC).getNetworkAddress().lookupIpAddress()).getHostAddress();
topology.lookupNetworkTerminal(ipAddress, path);
if (path.isEmpty()) {
// if the hyracks nc is not in the defined cluster
@@ -64,8 +64,8 @@
ncs.add(NC);
}
- final TreeMap<List<Integer>, IntWritable> availableIpsToSlots = new TreeMap<List<Integer>, IntWritable>(
- (l1, l2) -> {
+ final TreeMap<List<Integer>, IntWritable> availableIpsToSlots =
+ new TreeMap<List<Integer>, IntWritable>((l1, l2) -> {
int commonLength = Math.min(l1.size(), l2.size());
for (int i = 0; i < commonLength; i++) {
int value1 = l1.get(i);
@@ -80,8 +80,9 @@
for (int i = 0; i < workloads.length; i++) {
if (workloads[i] < slotLimit) {
List<Integer> path = new ArrayList<Integer>();
- String ipAddress = InetAddress.getByAddress(
- ncNameToNcInfos.get(NCs[i]).getNetworkAddress().lookupIpAddress()).getHostAddress();
+ String ipAddress =
+ InetAddress.getByAddress(ncNameToNcInfos.get(NCs[i]).getNetworkAddress().lookupIpAddress())
+ .getHostAddress();
topology.lookupNetworkTerminal(ipAddress, path);
if (path.isEmpty()) {
// if the hyracks nc is not in the defined cluster
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
index 615f827..25cc9b3 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs/scheduler/Scheduler.java
@@ -130,8 +130,8 @@
public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos, ClusterTopology topology)
throws HyracksException {
this(ncNameToNcInfos);
- this.ncCollectionBuilder = topology == null ? new IPProximityNcCollectionBuilder()
- : new RackAwareNcCollectionBuilder(topology);
+ this.ncCollectionBuilder =
+ topology == null ? new IPProximityNcCollectionBuilder() : new RackAwareNcCollectionBuilder(topology);
}
/**
@@ -276,7 +276,7 @@
*/
private void scheduleLocalSlots(InputSplit[] splits, int[] workloads, String[] locations, int slots, Random random,
boolean[] scheduled, final Map<String, IntWritable> locationToNumSplits)
- throws IOException, UnknownHostException {
+ throws IOException, UnknownHostException {
/** scheduling candidates will be ordered inversely according to their popularity */
PriorityQueue<String> scheduleCadndiates = new PriorityQueue<String>(3, new Comparator<String>() {
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
index 15bf260..0c635e0 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/dataflow/HDFSReadOperatorDescriptor.java
@@ -103,7 +103,7 @@
@Override
public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
IRecordDescriptorProvider recordDescProvider, final int partition, final int nPartitions)
- throws HyracksDataException {
+ throws HyracksDataException {
final List<FileSplit> inputSplits = splitsFactory.getSplits();
return new AbstractUnaryOutputSourceOperatorNodePushable() {
@@ -120,8 +120,8 @@
Job job = confFactory.getConf();
job.getConfiguration().setClassLoader(ctx.getJobletContext().getClassLoader());
IKeyValueParser parser = tupleParserFactory.createKeyValueParser(ctx);
- InputFormat inputFormat = ReflectionUtils.newInstance(job.getInputFormatClass(),
- job.getConfiguration());
+ InputFormat inputFormat =
+ ReflectionUtils.newInstance(job.getInputFormatClass(), job.getConfiguration());
int size = inputSplits.size();
for (int i = 0; i < size; i++) {
/**
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/scheduler/Scheduler.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/scheduler/Scheduler.java
index 97960bc..fb46842 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/scheduler/Scheduler.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/main/java/org/apache/hyracks/hdfs2/scheduler/Scheduler.java
@@ -69,7 +69,8 @@
* the hyracks cluster toplogy
* @throws HyracksException
*/
- public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos, ClusterTopology topology) throws HyracksException {
+ public Scheduler(Map<String, NodeControllerInfo> ncNameToNcInfos, ClusterTopology topology)
+ throws HyracksException {
scheduler = new org.apache.hyracks.hdfs.scheduler.Scheduler(ncNameToNcInfos, topology);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java
index 0d0cd3e..b8351f3 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/dataflow/DataflowTest.java
@@ -73,7 +73,6 @@
private static final String PATH_TO_HADOOP_CONF = FileUtil.joinPath(TEST_RESOURCES, "hadoop", "conf");
protected static final String BUILD_DIR = FileUtil.joinPath("target", "build");
-
private static final String DATA_PATH = FileUtil.joinPath(TEST_RESOURCES, "data", "customer.tbl");
protected static final String HDFS_INPUT_PATH = "/customer/";
protected static final String HDFS_OUTPUT_PATH = "/customer_result/";
@@ -151,11 +150,11 @@
String[] readSchedule = scheduler.getLocationConstraints(splits);
JobSpecification jobSpec = new JobSpecification();
- RecordDescriptor recordDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor recordDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- String[] locations = new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID,
- HyracksUtils.NC2_ID };
+ String[] locations =
+ new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID, HyracksUtils.NC2_ID };
HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
readSchedule, new TextKeyValueParserFactory());
PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
@@ -164,19 +163,21 @@
new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
- HDFSWriteOperatorDescriptor writeOperator = new HDFSWriteOperatorDescriptor(jobSpec, conf,
- new TextTupleWriterFactory());
+ HDFSWriteOperatorDescriptor writeOperator =
+ new HDFSWriteOperatorDescriptor(jobSpec, conf, new TextTupleWriterFactory());
PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
- jobSpec.connect(new MToNPartitioningMergingConnectorDescriptor(jobSpec, new FieldHashPartitionComputerFactory(
- new int[] { 0 }, new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
- new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
+ jobSpec.connect(
+ new MToNPartitioningMergingConnectorDescriptor(jobSpec,
+ new FieldHashPartitionComputerFactory(new int[] { 0 },
+ new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
+ new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
sortOperator, 0, writeOperator, 0);
jobSpec.addRoot(writeOperator);
- IHyracksClientConnection client = new HyracksConnection(HyracksUtils.CC_HOST,
- HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
+ IHyracksClientConnection client =
+ new HyracksConnection(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
JobId jobId = client.startJob(jobSpec);
client.waitForCompletion(jobId);
@@ -195,8 +196,8 @@
Path actual = new Path(ACTUAL_RESULT_DIR);
dfs.copyToLocalFile(result, actual);
- TestUtils.compareWithResult(new File(FileUtil.joinPath(EXPECTED_RESULT_PATH, "part-0")), new File(
- FileUtil.joinPath(ACTUAL_RESULT_DIR, "customer_result", "part-0")));
+ TestUtils.compareWithResult(new File(FileUtil.joinPath(EXPECTED_RESULT_PATH, "part-0")),
+ new File(FileUtil.joinPath(ACTUAL_RESULT_DIR, "customer_result", "part-0")));
return true;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/scheduler/SchedulerTest.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/scheduler/SchedulerTest.java
index bb28c79..b735833 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/scheduler/SchedulerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs/scheduler/SchedulerTest.java
@@ -58,8 +58,8 @@
* @throws Exception
*/
public void testSchedulerSimple() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
InputSplit[] fileSplits = new InputSplit[6];
fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
@@ -94,8 +94,8 @@
int dataPort = 5099;
int resultPort = 5098;
int messagingPort = 5097;
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(4, "nc", "10.0.0.",
- dataPort, resultPort, messagingPort);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(4, "nc", "10.0.0.", dataPort, resultPort, messagingPort);
ncNameToNcInfos.put("nc7",
new NodeControllerInfo("nc7", NodeStatus.ACTIVE, new NetworkAddress("10.0.0.7", dataPort),
new NetworkAddress("10.0.0.5", resultPort), new NetworkAddress("10.0.0.5", messagingPort), 2));
@@ -112,8 +112,8 @@
fileSplits[5] = new FileSplit(new Path("part-6"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
fileSplits[6] = new FileSplit(new Path("part-7"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
fileSplits[7] = new FileSplit(new Path("part-8"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
- fileSplits[8] = new FileSplit(new Path("part-12"), 0, 0,
- new String[] { "10.0.0.14", "10.0.0.11", "10.0.0.13" });
+ fileSplits[8] =
+ new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.14", "10.0.0.11", "10.0.0.13" });
fileSplits[9] = new FileSplit(new Path("part-10"), 0, 0, new String[] { "10.0.0.2", "10.0.0.1", "10.0.0.6" });
fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.7" });
fileSplits[11] = new FileSplit(new Path("part-9"), 0, 0, new String[] { "10.0.0.4", "10.0.0.5", "10.0.0.6" });
@@ -121,14 +121,14 @@
Scheduler scheduler = new Scheduler(ncNameToNcInfos);
String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
- String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc12", "nc7",
- "nc7", "nc12" };
+ String[] expectedResults =
+ new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc12", "nc7", "nc7", "nc12" };
for (int i = 0; i < locationConstraints.length; i++) {
Assert.assertEquals(locationConstraints[i], expectedResults[i]);
}
- expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc7", "nc12", "nc7",
- "nc12" };
+ expectedResults =
+ new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc7", "nc12", "nc7", "nc12" };
ClusterTopology topology = parseTopology();
scheduler = new Scheduler(ncNameToNcInfos, topology);
locationConstraints = scheduler.getLocationConstraints(fileSplits);
@@ -143,8 +143,8 @@
* @throws Exception
*/
public void testSchedulerSmallerHDFS() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
InputSplit[] fileSplits = new InputSplit[12];
fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
@@ -160,8 +160,8 @@
fileSplits[10] = new FileSplit(new Path("part-11"), 0, 0, new String[] { "10.0.0.3", "10.0.0.4", "10.0.0.5" });
fileSplits[11] = new FileSplit(new Path("part-12"), 0, 0, new String[] { "10.0.0.2", "10.0.0.3", "10.0.0.5" });
- String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc6",
- "nc5", "nc6" };
+ String[] expectedResults =
+ new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc6", "nc5", "nc6" };
Scheduler scheduler = new Scheduler(ncNameToNcInfos);
String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
@@ -184,8 +184,8 @@
* @throws Exception
*/
public void testSchedulerSmallerHDFSOdd() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
InputSplit[] fileSplits = new InputSplit[13];
fileSplits[0] = new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" });
@@ -227,8 +227,8 @@
* @throws Exception
*/
public void testSchedulercBoundary() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
/** test empty file splits */
InputSplit[] fileSplits = new InputSplit[0];
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/dataflow/DataflowTest.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/dataflow/DataflowTest.java
index 3c9b1c0..8be6d69 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/dataflow/DataflowTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/dataflow/DataflowTest.java
@@ -98,11 +98,11 @@
String[] readSchedule = scheduler.getLocationConstraints(splits);
JobSpecification jobSpec = new JobSpecification();
- RecordDescriptor recordDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
+ RecordDescriptor recordDesc =
+ new RecordDescriptor(new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer() });
- String[] locations = new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID,
- HyracksUtils.NC2_ID };
+ String[] locations =
+ new String[] { HyracksUtils.NC1_ID, HyracksUtils.NC1_ID, HyracksUtils.NC2_ID, HyracksUtils.NC2_ID };
HDFSReadOperatorDescriptor readOperator = new HDFSReadOperatorDescriptor(jobSpec, recordDesc, conf, splits,
readSchedule, new TextKeyValueParserFactory());
PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, readOperator, locations);
@@ -111,19 +111,21 @@
new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, recordDesc);
PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, sortOperator, locations);
- HDFSWriteOperatorDescriptor writeOperator = new HDFSWriteOperatorDescriptor(jobSpec, conf,
- new TextTupleWriterFactory());
+ HDFSWriteOperatorDescriptor writeOperator =
+ new HDFSWriteOperatorDescriptor(jobSpec, conf, new TextTupleWriterFactory());
PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, writeOperator, HyracksUtils.NC1_ID);
jobSpec.connect(new OneToOneConnectorDescriptor(jobSpec), readOperator, 0, sortOperator, 0);
- jobSpec.connect(new MToNPartitioningMergingConnectorDescriptor(jobSpec, new FieldHashPartitionComputerFactory(
- new int[] { 0 }, new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
- new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
+ jobSpec.connect(
+ new MToNPartitioningMergingConnectorDescriptor(jobSpec,
+ new FieldHashPartitionComputerFactory(new int[] { 0 },
+ new IBinaryHashFunctionFactory[] { RawBinaryHashFunctionFactory.INSTANCE }),
+ new int[] { 0 }, new IBinaryComparatorFactory[] { RawBinaryComparatorFactory.INSTANCE }, null),
sortOperator, 0, writeOperator, 0);
jobSpec.addRoot(writeOperator);
- IHyracksClientConnection client = new HyracksConnection(HyracksUtils.CC_HOST,
- HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
+ IHyracksClientConnection client =
+ new HyracksConnection(HyracksUtils.CC_HOST, HyracksUtils.TEST_HYRACKS_CC_CLIENT_PORT);
JobId jobId = client.startJob(jobSpec);
client.waitForCompletion(jobId);
diff --git a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/scheduler/SchedulerTest.java b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/scheduler/SchedulerTest.java
index 4d970ba..82230718 100644
--- a/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/scheduler/SchedulerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-hdfs/hyracks-hdfs-core/src/test/java/org/apache/hyracks/hdfs2/scheduler/SchedulerTest.java
@@ -43,8 +43,8 @@
* @throws Exception
*/
public void testSchedulerSimple() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
List<InputSplit> fileSplits = new ArrayList<>();
fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
@@ -70,8 +70,8 @@
* @throws Exception
*/
public void testSchedulerLargerHDFS() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
List<InputSplit> fileSplits = new ArrayList<>();
fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
@@ -90,8 +90,8 @@
Scheduler scheduler = new Scheduler(ncNameToNcInfos);
String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
- String[] expectedResults = new String[] { "nc1", "nc4", "nc6", "nc1", "nc4", "nc2", "nc2", "nc3", "nc6", "nc5",
- "nc3", "nc5" };
+ String[] expectedResults =
+ new String[] { "nc1", "nc4", "nc6", "nc1", "nc4", "nc2", "nc2", "nc3", "nc6", "nc5", "nc3", "nc5" };
for (int i = 0; i < locationConstraints.length; i++) {
Assert.assertEquals(locationConstraints[i], expectedResults[i]);
@@ -104,8 +104,8 @@
* @throws Exception
*/
public void testSchedulerSmallerHDFS() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
List<InputSplit> fileSplits = new ArrayList<>();
fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
@@ -124,8 +124,8 @@
Scheduler scheduler = new Scheduler(ncNameToNcInfos);
String[] locationConstraints = scheduler.getLocationConstraints(fileSplits);
- String[] expectedResults = new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc6",
- "nc5", "nc6" };
+ String[] expectedResults =
+ new String[] { "nc1", "nc4", "nc4", "nc1", "nc3", "nc2", "nc2", "nc3", "nc5", "nc6", "nc5", "nc6" };
for (int i = 0; i < locationConstraints.length; i++) {
Assert.assertEquals(locationConstraints[i], expectedResults[i]);
@@ -138,8 +138,8 @@
* @throws Exception
*/
public void testSchedulerSmallerHDFSOdd() throws Exception {
- Map<String, NodeControllerInfo> ncNameToNcInfos = TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099,
- 5098, 5097);
+ Map<String, NodeControllerInfo> ncNameToNcInfos =
+ TestUtils.generateNodeControllerInfo(6, "nc", "10.0.0.", 5099, 5098, 5097);
List<InputSplit> fileSplits = new ArrayList<>();
fileSplits.add(new FileSplit(new Path("part-1"), 0, 0, new String[] { "10.0.0.1", "10.0.0.2", "10.0.0.3" }));
diff --git a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java
index cb6ad0d..3ab2ab9 100644
--- a/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java
+++ b/hyracks-fullstack/hyracks/hyracks-http/src/main/java/org/apache/hyracks/http/server/HttpRequestCapacityController.java
@@ -68,8 +68,8 @@
HttpResponseEncoder encoder = new HttpResponseEncoder();
ChannelPromise promise = ctx.newPromise();
promise.addListener(ChannelFutureListener.CLOSE);
- DefaultFullHttpResponse response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1,
- HttpResponseStatus.SERVICE_UNAVAILABLE);
+ DefaultFullHttpResponse response =
+ new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.SERVICE_UNAVAILABLE);
try {
encoder.write(ctx, response, ctx.voidPromise());
ctx.writeAndFlush(ctx.alloc().buffer(0), promise);
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
index 7688974..86c8c75 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/IPCConnectionManager.java
@@ -338,9 +338,8 @@
// reset failingLoops on a good loop
failingLoops = 0;
} catch (Exception e) {
- int sleepSecs = (int)Math.pow(2, Math.min(11, failingLoops++));
- LOGGER.log(Level.ERROR, "Exception processing message; sleeping " + sleepSecs
- + " seconds", e);
+ int sleepSecs = (int) Math.pow(2, Math.min(11, failingLoops++));
+ LOGGER.log(Level.ERROR, "Exception processing message; sleeping " + sleepSecs + " seconds", e);
try {
Thread.sleep(TimeUnit.SECONDS.toMillis(sleepSecs));
} catch (InterruptedException e1) {
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/JavaSerializationBasedPayloadSerializerDeserializer.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/JavaSerializationBasedPayloadSerializerDeserializer.java
index 2c1f0dc..c4263d2 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/JavaSerializationBasedPayloadSerializerDeserializer.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/JavaSerializationBasedPayloadSerializerDeserializer.java
@@ -55,8 +55,8 @@
}
private Object deserialize(ByteBuffer buffer, int length) throws Exception {
- ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(buffer.array(), buffer.position(),
- length));
+ ObjectInputStream ois =
+ new ObjectInputStream(new ByteArrayInputStream(buffer.array(), buffer.position(), length));
Object object = ois.readObject();
ois.close();
return object;
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/Message.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/Message.java
index 1f3f0c3..550ce45 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/Message.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/Message.java
@@ -103,8 +103,8 @@
int length = msgSize - HEADER_SIZE;
try {
IPayloadSerializerDeserializer serde = ipcHandle.getIPCSystem().getSerializerDeserializer();
- payload = flag == ERROR ? serde.deserializeException(buffer, length) : serde.deserializeObject(buffer,
- length);
+ payload = flag == ERROR ? serde.deserializeException(buffer, length)
+ : serde.deserializeObject(buffer, length);
} finally {
buffer.position(finalPosition);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-ipc/src/test/java/org/apache/hyracks/ipc/tests/IPCTest.java b/hyracks-fullstack/hyracks/hyracks-ipc/src/test/java/org/apache/hyracks/ipc/tests/IPCTest.java
index b454520..1a075d5 100644
--- a/hyracks-fullstack/hyracks/hyracks-ipc/src/test/java/org/apache/hyracks/ipc/tests/IPCTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-ipc/src/test/java/org/apache/hyracks/ipc/tests/IPCTest.java
@@ -63,8 +63,8 @@
final Executor executor = Executors.newCachedThreadPool();
IIPCI ipci = new IIPCI() {
@Override
- public void deliverIncomingMessage(final IIPCHandle handle, final long mid, long rmid,
- final Object payload, Exception exception) {
+ public void deliverIncomingMessage(final IIPCHandle handle, final long mid, long rmid, final Object payload,
+ Exception exception) {
executor.execute(new Runnable() {
@Override
public void run() {
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DependencySet.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DependencySet.java
index 8e91be4..3aef194 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DependencySet.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DependencySet.java
@@ -27,7 +27,7 @@
@SuppressWarnings("unused") // set by Maven configuration
private String location;
- @SuppressWarnings({"unused", "MismatchedQueryAndUpdateOfCollection"}) // set by Maven configuration
+ @SuppressWarnings({ "unused", "MismatchedQueryAndUpdateOfCollection" }) // set by Maven configuration
private List<String> includes;
private List<Pattern> patterns;
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DownloadLicensesMojo.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DownloadLicensesMojo.java
index 7d0e77d..1b2961f 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DownloadLicensesMojo.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/DownloadLicensesMojo.java
@@ -39,10 +39,7 @@
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.ProjectBuildingException;
-@Mojo(name = "licensedownload",
- requiresProject = true,
- requiresDependencyResolution = ResolutionScope.TEST,
- defaultPhase = LifecyclePhase.GENERATE_RESOURCES)
+@Mojo(name = "licensedownload", requiresProject = true, requiresDependencyResolution = ResolutionScope.TEST, defaultPhase = LifecyclePhase.GENERATE_RESOURCES)
public class DownloadLicensesMojo extends LicenseMojo {
@Parameter(required = true)
@@ -73,7 +70,7 @@
private void doDownload(int timeoutMillis, int id, String url, String fileName) {
try {
- HttpURLConnection conn = (HttpURLConnection)new URL(url).openConnection();
+ HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
conn.setConnectTimeout(timeoutMillis);
conn.setReadTimeout(timeoutMillis);
conn.setRequestMethod("GET");
@@ -90,4 +87,3 @@
}
}
}
-
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/GenerateFileMojo.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/GenerateFileMojo.java
index 387d18e..0245eb3 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/GenerateFileMojo.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/GenerateFileMojo.java
@@ -45,12 +45,6 @@
import java.util.jar.JarFile;
import java.util.regex.Pattern;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.SequenceWriter;
-import freemarker.cache.FileTemplateLoader;
-import freemarker.template.Configuration;
-import freemarker.template.Template;
-import freemarker.template.TemplateException;
import org.apache.commons.io.IOUtils;
import org.apache.hyracks.maven.license.freemarker.IndentDirective;
import org.apache.hyracks.maven.license.freemarker.LoadFileDirective;
@@ -63,13 +57,19 @@
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.ProjectBuildingException;
-@Mojo(name = "generate",
- requiresProject = true,
- requiresDependencyResolution = ResolutionScope.TEST)
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SequenceWriter;
+import freemarker.cache.FileTemplateLoader;
+import freemarker.template.Configuration;
+import freemarker.template.Template;
+import freemarker.template.TemplateException;
+
+@Mojo(name = "generate", requiresProject = true, requiresDependencyResolution = ResolutionScope.TEST)
public class GenerateFileMojo extends LicenseMojo {
- public static final Pattern FOUNDATION_PATTERN = Pattern.compile("^\\s*This product includes software developed " +
- "(at|by) The Apache Software Foundation \\(http://www.apache.org/\\).\\s*$".replace(" ", "\\s+"),
+ public static final Pattern FOUNDATION_PATTERN = Pattern.compile(
+ "^\\s*This product includes software developed "
+ + "(at|by) The Apache Software Foundation \\(http://www.apache.org/\\).\\s*$".replace(" ", "\\s+"),
Pattern.DOTALL | Pattern.MULTILINE);
public static final Comparator<String> WHITESPACE_NORMALIZED_COMPARATOR =
@@ -121,7 +121,6 @@
}
}
-
private void resolveLicenseContent() throws IOException {
Set<LicenseSpec> licenseSpecs = new HashSet<>();
for (LicensedProjects licensedProjects : licenseMap.values()) {
@@ -158,7 +157,7 @@
private void combineCommonGavs() {
for (LicensedProjects licensedProjects : licenseMap.values()) {
Map<String, Project> projectMap = new HashMap<>();
- for (Iterator<Project> iter = licensedProjects.getProjects().iterator(); iter.hasNext(); ) {
+ for (Iterator<Project> iter = licensedProjects.getProjects().iterator(); iter.hasNext();) {
Project project = iter.next();
if (projectMap.containsKey(project.gav())) {
Project first = projectMap.get(project.gav());
@@ -208,19 +207,17 @@
private void readExtraMaps() throws IOException {
final ObjectMapper objectMapper = new ObjectMapper();
for (ExtraLicenseFile extraLicenseFile : extraLicenseMaps) {
- for (LicensedProjects projects :
- objectMapper.readValue(extraLicenseFile.getFile(), LicensedProjects[].class)) {
+ for (LicensedProjects projects : objectMapper.readValue(extraLicenseFile.getFile(),
+ LicensedProjects[].class)) {
LicenseSpec spec = urlToLicenseMap.get(projects.getLicense().getUrl());
if (spec != null) {
// TODO(mblow): probably we should always favor the extra map...
// propagate any license content we may have with what already has been loaded
- if (projects.getLicense().getContent() != null &&
- spec.getContent() == null) {
+ if (projects.getLicense().getContent() != null && spec.getContent() == null) {
spec.setContent(projects.getLicense().getContent());
}
// propagate any license displayName we may have with what already has been loaded
- if (projects.getLicense().getDisplayName() != null &&
- spec.getDisplayName() == null) {
+ if (projects.getLicense().getDisplayName() != null && spec.getDisplayName() == null) {
spec.setDisplayName(projects.getLicense().getDisplayName());
}
}
@@ -235,8 +232,8 @@
private void persistLicenseMap() throws IOException {
if (licenseMapOutputFile != null) {
licenseMapOutputFile.getParentFile().mkdirs();
- SequenceWriter sw = new ObjectMapper().writerWithDefaultPrettyPrinter()
- .writeValues(licenseMapOutputFile).init(true);
+ SequenceWriter sw =
+ new ObjectMapper().writerWithDefaultPrettyPrinter().writeValues(licenseMapOutputFile).init(true);
for (LicensedProjects entry : licenseMap.values()) {
sw.write(entry);
}
@@ -321,7 +318,7 @@
}
private void resolveArtifactFiles(final String name, Predicate<JarEntry> filter,
- BiConsumer<Project, String> consumer, UnaryOperator<String> contentTransformer)
+ BiConsumer<Project, String> consumer, UnaryOperator<String> contentTransformer)
throws MojoExecutionException, IOException {
for (Project p : getProjects()) {
File artifactFile = new File(p.getArtifactPath());
@@ -332,8 +329,7 @@
continue;
}
try (JarFile jarFile = new JarFile(artifactFile)) {
- SortedMap<String, JarEntry> matches = gatherMatchingEntries(jarFile,
- filter);
+ SortedMap<String, JarEntry> matches = gatherMatchingEntries(jarFile, filter);
if (matches.isEmpty()) {
getLog().warn("No " + name + " file found for " + p.gav());
} else {
@@ -343,15 +339,14 @@
} else {
getLog().info(p.gav() + " has " + name + " file: " + matches.keySet());
}
- resolveContent(p, jarFile, matches.values().iterator().next(),
- contentTransformer, consumer, name);
+ resolveContent(p, jarFile, matches.values().iterator().next(), contentTransformer, consumer, name);
}
}
}
}
private void resolveContent(Project project, JarFile jarFile, JarEntry entry, UnaryOperator<String> transformer,
- BiConsumer<Project, String> contentConsumer, final String name) throws IOException {
+ BiConsumer<Project, String> contentConsumer, final String name) throws IOException {
String text = IOUtils.toString(jarFile.getInputStream(entry), StandardCharsets.UTF_8);
text = transformer.apply(text);
text = LicenseUtil.trim(text);
@@ -375,4 +370,3 @@
return matches;
}
}
-
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java
index 6d8f9cf..97afffb 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseMojo.java
@@ -59,7 +59,7 @@
protected List<Override> overrides = new ArrayList<>();
@Parameter
- protected String [] models = new String [0];
+ protected String[] models = new String[0];
@Parameter
protected List<LicenseSpec> licenses = new ArrayList<>();
@@ -73,31 +73,31 @@
@Parameter
protected List<DependencySet> dependencySets = new ArrayList<>();
- @Parameter( defaultValue = "${project}", readonly = true )
+ @Parameter(defaultValue = "${project}", readonly = true)
protected MavenProject project;
- @Parameter( property = "localRepository", required = true, readonly = true )
+ @Parameter(property = "localRepository", required = true, readonly = true)
private ArtifactRepository localRepository;
- @Parameter( property = "project.remoteArtifactRepositories", required = true, readonly = true )
+ @Parameter(property = "project.remoteArtifactRepositories", required = true, readonly = true)
private List<ArtifactRepository> remoteRepositories;
- @Component( role = MavenProjectBuilder.class )
+ @Component(role = MavenProjectBuilder.class)
protected MavenProjectBuilder projectBuilder;
@Component
private ModelInheritanceAssembler assembler;
- @Parameter( defaultValue = "${session}", required = true, readonly = true )
+ @Parameter(defaultValue = "${session}", required = true, readonly = true)
protected MavenSession session;
@Component
protected ArtifactResolver artifactResolver;
- @Parameter ( required = true )
+ @Parameter(required = true)
private String location;
- @Parameter ( required = true )
+ @Parameter(required = true)
protected File licenseDirectory;
private Map<String, MavenProject> projectCache = new HashMap<>();
@@ -113,8 +113,7 @@
return licenseMap;
}
- protected void init() throws MojoExecutionException, MalformedURLException,
- ProjectBuildingException {
+ protected void init() throws MojoExecutionException, MalformedURLException, ProjectBuildingException {
excludedScopes.add("system");
excludePatterns = compileExcludePatterns();
supplementModels = SupplementalModelHelper.loadSupplements(getLog(), models);
@@ -144,7 +143,7 @@
}
private void addDependencyToLicenseMap(MavenProject depProject, List<Pair<String, String>> depLicenses,
- String depLocation) {
+ String depLocation) {
final String depGav = toGav(depProject);
getLog().debug("adding " + depGav + ", location: " + depLocation);
final MutableBoolean usedMetric = new MutableBoolean(false);
@@ -152,17 +151,16 @@
Collections.sort(depLicenses, (o1, o2) -> {
final int metric1 = getLicenseMetric(o1.getLeft());
final int metric2 = getLicenseMetric(o2.getLeft());
- usedMetric.setValue(usedMetric.booleanValue()
- || metric1 != LicenseSpec.UNDEFINED_LICENSE_METRIC
+ usedMetric.setValue(usedMetric.booleanValue() || metric1 != LicenseSpec.UNDEFINED_LICENSE_METRIC
|| metric2 != LicenseSpec.UNDEFINED_LICENSE_METRIC);
return Integer.compare(metric1, metric2);
});
if (usedMetric.booleanValue()) {
- getLog().info("Multiple licenses for " + depGav + ": " + depLicenses
- + "; taking lowest metric: " + depLicenses.get(0));
+ getLog().info("Multiple licenses for " + depGav + ": " + depLicenses + "; taking lowest metric: "
+ + depLicenses.get(0));
} else {
- getLog().warn("Multiple licenses for " + depGav + ": " + depLicenses
- + "; taking first listed: " + depLicenses.get(0));
+ getLog().warn("Multiple licenses for " + depGav + ": " + depLicenses + "; taking first listed: "
+ + depLicenses.get(0));
}
} else if (depLicenses.isEmpty()) {
getLog().info("no license defined in model for " + depGav);
@@ -179,8 +177,7 @@
} catch (MalformedURLException e) {
// we encounter this a lot. Log a warning, and use an annotated key
final String fakeLicenseUrl = depGav.replaceAll(":", "--") + "_" + licenseUrl;
- getLog().info("- URL for " + depGav + " is malformed: " + licenseUrl + "; using: "
- + fakeLicenseUrl);
+ getLog().info("- URL for " + depGav + " is malformed: " + licenseUrl + "; using: " + fakeLicenseUrl);
licenseUrl = fakeLicenseUrl;
}
}
@@ -196,7 +193,7 @@
urlToLicenseMap.put(licenseUrl, license);
for (String alias : license.getAliasUrls()) {
if (!urlToLicenseMap.containsKey(alias)) {
- urlToLicenseMap.put(alias ,license);
+ urlToLicenseMap.put(alias, license);
}
}
} else if (license.getDisplayName() == null && spec.getDisplayName() != null) {
@@ -216,11 +213,11 @@
private void buildUrlLicenseMap() throws MojoExecutionException {
for (LicenseSpec license : licenses) {
- if (urlToLicenseMap.put(license.getUrl() ,license) != null) {
+ if (urlToLicenseMap.put(license.getUrl(), license) != null) {
throw new MojoExecutionException("Duplicate URL mapping: " + license.getUrl());
}
for (String alias : license.getAliasUrls()) {
- if (urlToLicenseMap.put(alias ,license) != null) {
+ if (urlToLicenseMap.put(alias, license) != null) {
throw new MojoExecutionException("Duplicate URL mapping: " + alias);
}
}
@@ -238,20 +235,19 @@
if (dep == null) {
getLog().warn("Unused override dependency " + gav + "; ignoring...");
} else {
- final List<Pair<String, String>> newLicense = Collections.singletonList(
- new ImmutablePair<>(override.getUrl(), override.getName()));
+ final List<Pair<String, String>> newLicense =
+ Collections.singletonList(new ImmutablePair<>(override.getUrl(), override.getName()));
List<Pair<String, String>> prevLicense = dependencyLicenseMap.put(dep, newLicense);
- getLog().warn("license list for " + toGav(dep)
- + " changed with <override>; was: " + prevLicense
+ getLog().warn("license list for " + toGav(dep) + " changed with <override>; was: " + prevLicense
+ ", now: " + newLicense);
}
}
return dependencyLicenseMap;
}
- private void gatherProjectDependencies(MavenProject project, Map<MavenProject,
- List<Pair<String, String>>> dependencyLicenseMap, Map<String, MavenProject> dependencyGavMap)
- throws ProjectBuildingException {
+ private void gatherProjectDependencies(MavenProject project,
+ Map<MavenProject, List<Pair<String, String>>> dependencyLicenseMap,
+ Map<String, MavenProject> dependencyGavMap) throws ProjectBuildingException {
final Set dependencyArtifacts = project.getArtifacts();
if (dependencyArtifacts != null) {
for (Object depArtifactObj : dependencyArtifacts) {
@@ -264,8 +260,7 @@
for (Object license : dep.getLicenses()) {
final License license1 = (License) license;
String url = license1.getUrl() != null ? license1.getUrl()
- : (license1.getName() != null ? license1.getName()
- : "LICENSE_EMPTY_NAME_URL");
+ : (license1.getName() != null ? license1.getName() : "LICENSE_EMPTY_NAME_URL");
licenseUrls.add(new ImmutablePair<>(url, license1.getName()));
}
dependencyLicenseMap.put(dep, licenseUrls);
@@ -286,22 +281,21 @@
throw new ProjectBuildingException(key, "Error creating dependent artifacts", e);
}
- Model supplement = supplementModels.get(
- SupplementalModelHelper.generateSupplementMapKey(depObj.getGroupId(), depObj.getArtifactId()));
+ Model supplement = supplementModels
+ .get(SupplementalModelHelper.generateSupplementMapKey(depObj.getGroupId(), depObj.getArtifactId()));
if (supplement != null) {
Model merged = SupplementalModelHelper.mergeModels(assembler, depProj.getModel(), supplement);
- Set<String> origLicenses = depProj.getModel().getLicenses().stream().map(License::getUrl)
- .collect(Collectors.toSet());
- Set<String> newLicenses = merged.getLicenses().stream().map(License::getUrl)
- .collect(Collectors.toSet());
+ Set<String> origLicenses =
+ depProj.getModel().getLicenses().stream().map(License::getUrl).collect(Collectors.toSet());
+ Set<String> newLicenses =
+ merged.getLicenses().stream().map(License::getUrl).collect(Collectors.toSet());
if (!origLicenses.equals(newLicenses)) {
- getLog().warn("license list for " + toGav(depProj)
- + " changed with supplemental model; was: " + origLicenses
- + ", now: " + newLicenses);
+ getLog().warn("license list for " + toGav(depProj) + " changed with supplemental model; was: "
+ + origLicenses + ", now: " + newLicenses);
}
depProj = new MavenProject(merged);
- depProj.setArtifact( depObj );
- depProj.setVersion( depObj.getVersion() );
+ depProj.setArtifact(depObj);
+ depProj.setVersion(depObj.getVersion());
}
depProj.getArtifact().setScope(depObj.getScope());
projectCache.put(key, depProj);
@@ -354,4 +348,3 @@
return artifactResolver;
}
}
-
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseSpec.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseSpec.java
index f2ff5dd..cd955d9 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseSpec.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseSpec.java
@@ -46,8 +46,8 @@
@JsonCreator
public LicenseSpec(@JsonProperty("aliasUrls") List<String> aliasUrls, @JsonProperty("content") String content,
- @JsonProperty("contentFile") String contentFile, @JsonProperty("displayName") String displayName,
- @JsonProperty("metric") int metric, @JsonProperty("url") String url) {
+ @JsonProperty("contentFile") String contentFile, @JsonProperty("displayName") String displayName,
+ @JsonProperty("metric") int metric, @JsonProperty("url") String url) {
this.aliasUrls = aliasUrls;
this.content = content;
this.contentFile = contentFile;
@@ -77,8 +77,7 @@
String file;
try {
URI uri = new URI(url);
- file = ((uri.getHost() != null ? uri.getHost() : "")
- + uri.getPath()).replaceAll(BAD_CHARS, "_");
+ file = ((uri.getHost() != null ? uri.getHost() : "") + uri.getPath()).replaceAll(BAD_CHARS, "_");
} catch (URISyntaxException e) {
file = url.replaceAll(BAD_CHARS, "_");
}
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseUtil.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseUtil.java
index 30588d4..a80dc1d 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/LicenseUtil.java
@@ -91,7 +91,7 @@
private static void doTrim(Writer out, BufferedReader reader, int extraPadding, int wrapLength) throws IOException {
boolean head = true;
int empty = 0;
- for (String line = reader.readLine(); line != null; line = reader.readLine() ) {
+ for (String line = reader.readLine(); line != null; line = reader.readLine()) {
if ("".equals(line.trim())) {
if (!head) {
empty++;
@@ -136,7 +136,7 @@
continue;
}
String fullyTrimmed = line.trim();
- freeSpaces = Math.min(freeSpaces, rightTrimmed.length() - fullyTrimmed.length());
+ freeSpaces = Math.min(freeSpaces, rightTrimmed.length() - fullyTrimmed.length());
maxLineLength = Math.max(maxLineLength, fullyTrimmed.length());
}
return new ImmutablePair<>(freeSpaces, maxLineLength);
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/Override.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/Override.java
index c99a047..aa532e6 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/Override.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/Override.java
@@ -18,7 +18,7 @@
*/
package org.apache.hyracks.maven.license;
-public class Override {
+public class Override {
@SuppressWarnings("unused") // set by Maven plugin configuration
private String url;
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/SourcePointerResolver.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/SourcePointerResolver.java
index c713b08..0a24a76 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/SourcePointerResolver.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/SourcePointerResolver.java
@@ -82,15 +82,14 @@
}
private void ensureCDDLSourcesPointer(Collection<Project> projects, ArtifactRepository central,
- ArtifactResolutionRequest request)
- throws ProjectBuildingException, IOException {
+ ArtifactResolutionRequest request) throws ProjectBuildingException, IOException {
for (Project p : projects) {
if (p.getSourcePointer() != null) {
continue;
}
mojo.getLog().debug("finding sources for artifact: " + p);
- Artifact sourcesArtifact = new DefaultArtifact(p.getGroupId(), p.getArtifactId(),
- p.getVersion(), Artifact.SCOPE_COMPILE, "jar", "sources", null);
+ Artifact sourcesArtifact = new DefaultArtifact(p.getGroupId(), p.getArtifactId(), p.getVersion(),
+ Artifact.SCOPE_COMPILE, "jar", "sources", null);
MavenProject mavenProject = mojo.resolveDependency(sourcesArtifact);
sourcesArtifact.setArtifactHandler(mavenProject.getArtifact().getArtifactHandler());
final ArtifactRepository localRepo = mojo.getSession().getLocalRepository();
@@ -162,7 +161,7 @@
@java.lang.Override
public String pathOfLocalRepositoryMetadata(ArtifactMetadata artifactMetadata,
- ArtifactRepository artifactRepository) {
+ ArtifactRepository artifactRepository) {
return null;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/IndentDirective.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/IndentDirective.java
index 77b8afd..f58b419 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/IndentDirective.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/IndentDirective.java
@@ -43,7 +43,7 @@
private static final String PARAM_NAME_WRAP = "wrap";
@Override
- public void execute(Environment env, Map params, TemplateModel [] loopVars, TemplateDirectiveBody body)
+ public void execute(Environment env, Map params, TemplateModel[] loopVars, TemplateDirectiveBody body)
throws TemplateException, IOException {
int numSpaces = -1;
@@ -106,8 +106,7 @@
}
private TemplateModelException paramException(String paramName, String message) throws TemplateModelException {
- return new TemplateModelException(
- "The '" + paramName + "' parameter " + message);
+ return new TemplateModelException("The '" + paramName + "' parameter " + message);
}
private static class IndentingWriter extends Writer {
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/LoadFileDirective.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/LoadFileDirective.java
index 67da23f..2b03fe2 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/LoadFileDirective.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/freemarker/LoadFileDirective.java
@@ -46,9 +46,7 @@
private static final String PARAM_DEFAULT_TEXT = "defaultOnMissing";
@Override
- public void execute(Environment env,
- Map params, TemplateModel[] loopVars,
- TemplateDirectiveBody body)
+ public void execute(Environment env, Map params, TemplateModel[] loopVars, TemplateDirectiveBody body)
throws TemplateException, IOException {
String fileParam = null;
@@ -91,15 +89,14 @@
}
}
if (fileParam == null) {
- throw new TemplateModelException(
- "The required \"" + PARAM_FILE + "\" parameter"
- + "is missing.");
+ throw new TemplateModelException("The required \"" + PARAM_FILE + "\" parameter" + "is missing.");
}
if (body != null) {
throw new TemplateModelException("Body is not supported by this directive");
}
Writer out = env.getOut();
- File baseDir = ((FileTemplateLoader)((Configuration)env.getTemplate().getParent()).getTemplateLoader()).baseDir;
+ File baseDir =
+ ((FileTemplateLoader) ((Configuration) env.getTemplate().getParent()).getTemplateLoader()).baseDir;
File file = new File(baseDir, fileParam);
if (file.exists()) {
if (trimParam) {
@@ -108,7 +105,7 @@
} else {
IOUtils.copy(new FileInputStream(file), out, StandardCharsets.UTF_8);
}
- } else if (defaultParam != null ) {
+ } else if (defaultParam != null) {
out.append(defaultParam).append("\n");
} else {
throw new IOException("File not found: " + file);
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/LicensedProjects.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/LicensedProjects.java
index 952b91a..f6f9f32 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/LicensedProjects.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/LicensedProjects.java
@@ -38,7 +38,7 @@
@JsonCreator
public LicensedProjects(@JsonProperty("license") LicenseSpec license,
- @JsonProperty("projects") Set<Project> projects) {
+ @JsonProperty("projects") Set<Project> projects) {
this.license = license;
this.projects.addAll(projects);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/Project.java b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/Project.java
index 80d4548..ff35162 100644
--- a/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/Project.java
+++ b/hyracks-fullstack/hyracks/hyracks-maven-plugins/license-automation-plugin/src/main/java/org/apache/hyracks/maven/license/project/Project.java
@@ -44,8 +44,8 @@
@JsonIgnore
private MavenProject mavenProject;
- public static final Comparator<Project> PROJECT_COMPARATOR = (o1, o2) ->
- o1.compareToken().compareTo(o2.compareToken());
+ public static final Comparator<Project> PROJECT_COMPARATOR =
+ (o1, o2) -> o1.compareToken().compareTo(o2.compareToken());
public Project(MavenProject project, String location, File artifactPath) {
mavenProject = project;
@@ -60,10 +60,10 @@
@JsonCreator
public Project(@JsonProperty("name") String name, @JsonProperty("groupId") String groupId,
- @JsonProperty("artifactId") String artifactId, @JsonProperty("url") String url,
- @JsonProperty("version") String version, @JsonProperty("location") String location,
- @JsonProperty("artifactPath") String artifactPath, @JsonProperty("noticeText") String noticeText,
- @JsonProperty("licenseText") String licenseText) {
+ @JsonProperty("artifactId") String artifactId, @JsonProperty("url") String url,
+ @JsonProperty("version") String version, @JsonProperty("location") String location,
+ @JsonProperty("artifactPath") String artifactPath, @JsonProperty("noticeText") String noticeText,
+ @JsonProperty("licenseText") String licenseText) {
this.name = name;
this.groupId = groupId;
this.artifactId = artifactId;
diff --git a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
index 81636de..286320b 100644
--- a/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
+++ b/hyracks-fullstack/hyracks/hyracks-net/src/main/java/org/apache/hyracks/net/protocols/muxdemux/MultiplexedConnection.java
@@ -265,8 +265,8 @@
pendingWriteEventsCounter.decrement();
}
BitSet pendingChannelCreditsBitmap = cSet.getPendingChannelCreditsBitmap();
- for (int j = pendingChannelCreditsBitmap.nextSetBit(0); j >= 0; j = pendingChannelCreditsBitmap
- .nextSetBit(j)) {
+ for (int j = pendingChannelCreditsBitmap.nextSetBit(0); j >= 0; j =
+ pendingChannelCreditsBitmap.nextSetBit(j)) {
writerState.command.setChannelId(j);
writerState.command.setCommandType(MuxDemuxCommand.CommandType.ADD_CREDITS);
ChannelControlBlock ccb = cSet.getCCB(j);
diff --git a/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksVirtualCluster.java b/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksVirtualCluster.java
index e1590e7..8aa5eab 100644
--- a/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksVirtualCluster.java
+++ b/hyracks-fullstack/hyracks/hyracks-server/src/main/java/org/apache/hyracks/server/process/HyracksVirtualCluster.java
@@ -83,4 +83,3 @@
}
}
}
-
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomCalculations.java b/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomCalculations.java
index e0cf1a80..9525192 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomCalculations.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-bloomfilter/src/main/java/org/apache/hyracks/storage/am/bloomfilter/impls/BloomCalculations.java
@@ -64,8 +64,8 @@
0.000176 },
{ 1.0, 0.0513, 0.00998, 0.00312, 0.0013, 0.000663, 0.000394, 0.000264, 0.000194, 0.000155, 0.000132,
0.000118, 0.000111, 0.000109 },
- { 1.0, 0.0488, 0.00906, 0.0027, 0.00108, 0.00053, 0.000303, 0.000196, 0.00014, 0.000108, 8.89e-05,
- 7.77e-05, 7.12e-05, 6.79e-05, 6.71e-05 } // 20
+ { 1.0, 0.0488, 0.00906, 0.0027, 0.00108, 0.00053, 0.000303, 0.000196, 0.00014, 0.000108, 8.89e-05, 7.77e-05,
+ 7.12e-05, 6.79e-05, 6.71e-05 } // 20
}; // the first column is a dummy column representing K=0.
/**
@@ -147,7 +147,8 @@
// we allocate one more bucket per element to compensate the effect introduced by using blocked bloom filter
// a detail analysis can be found at https://dl.acm.org/citation.cfm?id=1594230
- return new BloomFilterSpecification(K, bucketsPerElement + 1); }
+ return new BloomFilterSpecification(K, bucketsPerElement + 1);
+ }
/**
* Calculates the maximum number of buckets per element that this implementation
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
index 499a01a..86497a8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
@@ -228,8 +228,8 @@
} else {
// segment has enough tuples: compress segment, extract prefix,
// write prefix tuple to buffer, and set prefix slot
- newPrefixSlots[newPrefixSlots.length - 1 - prefixTupleIndex] = slotManager
- .encodeSlotFields(fieldCountToCompress, prefixFreeSpace);
+ newPrefixSlots[newPrefixSlots.length - 1 - prefixTupleIndex] =
+ slotManager.encodeSlotFields(fieldCountToCompress, prefixFreeSpace);
prefixFreeSpace += tupleWriter.writeTupleFields(prevTuple, 0, fieldCountToCompress,
byteBuffer.array(), prefixFreeSpace);
@@ -237,8 +237,8 @@
for (int j = 0; j < tuplesInSegment; j++) {
int currTupleIndex = segmentStart + j;
tupleToWrite.resetByTupleIndex(frame, currTupleIndex);
- newTupleSlots[tupleCount - 1 - currTupleIndex] = slotManager.encodeSlotFields(
- prefixTupleIndex, tupleFreeSpace);
+ newTupleSlots[tupleCount - 1 - currTupleIndex] =
+ slotManager.encodeSlotFields(prefixTupleIndex, tupleFreeSpace);
tupleFreeSpace += tupleWriter.writeTupleFields(tupleToWrite, fieldCountToCompress,
fieldCount - fieldCountToCompress, byteBuffer.array(), tupleFreeSpace);
}
@@ -257,16 +257,16 @@
} else {
// just write the tuple uncompressed
tupleToWrite.resetByTupleIndex(frame, tupleIndex);
- newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(
- FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
+ newTupleSlots[tupleCount - 1 - tupleIndex] =
+ slotManager.encodeSlotFields(FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
uncompressedTupleCount++;
}
} else {
// just write the tuple uncompressed
tupleToWrite.resetByTupleIndex(frame, tupleIndex);
- newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(
- FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
+ newTupleSlots[tupleCount - 1 - tupleIndex] =
+ slotManager.encodeSlotFields(FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
uncompressedTupleCount++;
}
@@ -282,15 +282,16 @@
// this can happen to to the greedy solution of the knapsack-like problem
// therefore, we check if the new space exceeds the page size to avoid the only danger of
// an increasing space
- int totalSpace = tupleFreeSpace + newTupleSlots.length * slotManager.getSlotSize() + newPrefixSlots.length
- * slotManager.getSlotSize();
+ int totalSpace = tupleFreeSpace + newTupleSlots.length * slotManager.getSlotSize()
+ + newPrefixSlots.length * slotManager.getSlotSize();
if (totalSpace > buf.capacity())
// just leave the page as is
return false;
// copy new tuple and new slots into original page
int freeSpaceAfterInit = frame.getOrigFreeSpaceOff();
- System.arraycopy(buffer, freeSpaceAfterInit, pageArray, freeSpaceAfterInit, tupleFreeSpace - freeSpaceAfterInit);
+ System.arraycopy(buffer, freeSpaceAfterInit, pageArray, freeSpaceAfterInit,
+ tupleFreeSpace - freeSpaceAfterInit);
// copy prefix slots
int slotOffRunner = buf.capacity() - slotManager.getSlotSize();
@@ -363,9 +364,8 @@
kp.pmi[j].matches++;
int prefixBytes = tupleWriter.bytesRequired(tuple, 0, prefixFieldsMatch);
- int spaceBenefit = tupleWriter.bytesRequired(tuple)
- - tupleWriter.bytesRequired(tuple, prefixFieldsMatch, tuple.getFieldCount()
- - prefixFieldsMatch);
+ int spaceBenefit = tupleWriter.bytesRequired(tuple) - tupleWriter.bytesRequired(tuple,
+ prefixFieldsMatch, tuple.getFieldCount() - prefixFieldsMatch);
if (kp.pmi[j].matches == occurrenceThreshold) {
// if we compress this prefix, we pay the cost of storing it once, plus
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
index 8b64318..cd0a2d3 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-btree/src/main/java/org/apache/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
@@ -57,7 +57,8 @@
}
// returns prefix slot number, or TUPLE_UNCOMPRESSED of no match was found
- public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple) throws HyracksDataException {
+ public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple)
+ throws HyracksDataException {
int prefixMid;
int prefixBegin = 0;
int prefixEnd = frame.getPrefixTupleCount() - 1;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IModificationOperationCallbackFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IModificationOperationCallbackFactory.java
index d0f2358..cedd764 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IModificationOperationCallbackFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/IModificationOperationCallbackFactory.java
@@ -28,7 +28,6 @@
@FunctionalInterface
public interface IModificationOperationCallbackFactory extends Serializable {
- IModificationOperationCallback createModificationOperationCallback(LocalResource resource,
- IHyracksTaskContext ctx, IOperatorNodePushable operatorNodePushable)
- throws HyracksDataException;
+ IModificationOperationCallback createModificationOperationCallback(LocalResource resource, IHyracksTaskContext ctx,
+ IOperatorNodePushable operatorNodePushable) throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ISearchOperationCallbackFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ISearchOperationCallbackFactory.java
index 0a5eacc..c8020fc 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ISearchOperationCallbackFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ISearchOperationCallbackFactory.java
@@ -26,6 +26,6 @@
import org.apache.hyracks.storage.common.ISearchOperationCallback;
public interface ISearchOperationCallbackFactory extends Serializable {
- public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx, IOperatorNodePushable operatorNodePushable)
- throws HyracksDataException;
+ public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx,
+ IOperatorNodePushable operatorNodePushable) throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexAccessor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexAccessor.java
index 3f8b6c1..90963bf 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexAccessor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/api/ITreeIndexAccessor.java
@@ -45,6 +45,5 @@
* @throws HyracksDataException
* If the BufferCache throws while un/pinning or un/latching.
*/
- public void diskOrderScan(ITreeIndexCursor cursor)
- throws HyracksDataException;
+ public void diskOrderScan(ITreeIndexCursor cursor) throws HyracksDataException;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexCreateOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexCreateOperatorDescriptor.java
index badcf27..898321b 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexCreateOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexCreateOperatorDescriptor.java
@@ -32,8 +32,7 @@
private static final long serialVersionUID = 1L;
private final IIndexBuilderFactory indexBuilderFactory;
- public IndexCreateOperatorDescriptor(IOperatorDescriptorRegistry spec,
- IIndexBuilderFactory indexBuilderFactory) {
+ public IndexCreateOperatorDescriptor(IOperatorDescriptorRegistry spec, IIndexBuilderFactory indexBuilderFactory) {
super(spec, 0, 0);
this.indexBuilderFactory = indexBuilderFactory;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
index f75144a..aae830d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexDropOperatorNodePushable.java
@@ -87,7 +87,7 @@
return;
}
if (canRetry(e)) {
- LOGGER.info( "Retrying drop on exception", e);
+ LOGGER.info("Retrying drop on exception", e);
continue;
}
throw e;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
index f67424e..d55962a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/dataflow/IndexSearchOperatorNodePushable.java
@@ -181,8 +181,8 @@
ITupleReference tuple = cursor.getTuple();
writeTupleToOutput(tuple);
if (appendIndexFilter) {
- writeFilterTupleToOutput(((ILSMIndexCursor)cursor).getFilterMinTuple());
- writeFilterTupleToOutput(((ILSMIndexCursor)cursor).getFilterMaxTuple());
+ writeFilterTupleToOutput(((ILSMIndexCursor) cursor).getFilterMinTuple());
+ writeFilterTupleToOutput(((ILSMIndexCursor) cursor).getFilterMaxTuple());
}
FrameUtils.appendToWriter(writer, appender, tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/AbstractSlotManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/AbstractSlotManager.java
index 92ba631..70333ac 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/AbstractSlotManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/AbstractSlotManager.java
@@ -42,8 +42,7 @@
@Override
public int getSlotEndOff() {
- return frame.getBuffer().capacity()
- - (frame.getTupleCount() * slotSize);
+ return frame.getBuffer().capacity() - (frame.getTupleCount() * slotSize);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
index de890c4..d0757c8 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
@@ -76,8 +76,8 @@
@Override
public int getSpace() {
- return buf.array().length - buf.getInt(Constants.FREE_SPACE_OFFSET) - (Integer.BYTES * buf.getInt(
- FREE_PAGE_COUNT_OFFSET));
+ return buf.array().length - buf.getInt(Constants.FREE_SPACE_OFFSET)
+ - (Integer.BYTES * buf.getInt(FREE_PAGE_COUNT_OFFSET));
}
@Override
@@ -209,8 +209,8 @@
private boolean isInner(IValueReference key, int tupleOffset) {
int keySize = buf.getInt(tupleOffset);
if (keySize == key.getLength()) {
- return LIFOMetaDataFrame.compare(key.getByteArray(), key.getStartOffset(), buf.array(), tupleOffset
- + Integer.BYTES, keySize) == 0;
+ return LIFOMetaDataFrame.compare(key.getByteArray(), key.getStartOffset(), buf.array(),
+ tupleOffset + Integer.BYTES, keySize) == 0;
}
return false;
}
@@ -253,8 +253,8 @@
int available = getSpace();
int required = key.getLength() + Integer.BYTES + Integer.BYTES + value.getLength();
if (available < required) {
- throw new HyracksDataException("Available space in the page ("
- + available + ") is not enough to store the key value pair(" + required + ")");
+ throw new HyracksDataException("Available space in the page (" + available
+ + ") is not enough to store the key value pair(" + required + ")");
}
buf.putInt(offset, key.getLength());
offset += Integer.BYTES;
@@ -294,14 +294,14 @@
@Override
public String toString() {
- StringBuilder aString = new StringBuilder(this.getClass().getSimpleName()).append('\n').
- append("Tuple Count: " + getTupleCount()).append('\n').
- append("Free Space offset: " + buf.getInt(Constants.FREE_SPACE_OFFSET)).append('\n').
- append("Level: " + buf.get(Constants.LEVEL_OFFSET)).append('\n').
- append("Version: " + buf.getInt(STORAGE_VERSION_OFFSET)).append('\n').
- append("Max Page: " + buf.getInt(MAX_PAGE_OFFSET)).append('\n').
- append("Root Page: " + buf.getInt(ROOT_PAGE_OFFSET)).append('\n').
- append("Number of free pages: " + buf.getInt(FREE_PAGE_COUNT_OFFSET));
+ StringBuilder aString = new StringBuilder(this.getClass().getSimpleName()).append('\n')
+ .append("Tuple Count: " + getTupleCount()).append('\n')
+ .append("Free Space offset: " + buf.getInt(Constants.FREE_SPACE_OFFSET)).append('\n')
+ .append("Level: " + buf.get(Constants.LEVEL_OFFSET)).append('\n')
+ .append("Version: " + buf.getInt(STORAGE_VERSION_OFFSET)).append('\n')
+ .append("Max Page: " + buf.getInt(MAX_PAGE_OFFSET)).append('\n')
+ .append("Root Page: " + buf.getInt(ROOT_PAGE_OFFSET)).append('\n')
+ .append("Number of free pages: " + buf.getInt(FREE_PAGE_COUNT_OFFSET));
int tupleCount = getTupleCount();
int offset;
for (int i = 0; i < tupleCount; i++) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/AppendOnlyLinkedMetadataPageManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/AppendOnlyLinkedMetadataPageManager.java
index a051364..5c389d2 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/AppendOnlyLinkedMetadataPageManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/AppendOnlyLinkedMetadataPageManager.java
@@ -345,10 +345,8 @@
try {
frame.setPage(page);
int inPageOffset = frame.getOffset(key);
- return inPageOffset >= 0
- ? ((long) pageId * bufferCache.getPageSizeWithHeader()) + frame.getOffset(key)
- + IBufferCache.RESERVED_HEADER_BYTES
- : -1L;
+ return inPageOffset >= 0 ? ((long) pageId * bufferCache.getPageSizeWithHeader()) + frame.getOffset(key)
+ + IBufferCache.RESERVED_HEADER_BYTES : -1L;
} finally {
page.releaseReadLatch();
unpinPage(page);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/LinkedMetaDataPageManager.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/LinkedMetaDataPageManager.java
index d8afd12..951d824 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/LinkedMetaDataPageManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/freepage/LinkedMetaDataPageManager.java
@@ -240,8 +240,8 @@
@Override
public void close() throws HyracksDataException {
if (ready) {
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, getMetadataPageId()),
- false);
+ ICachedPage metaNode =
+ bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, getMetadataPageId()), false);
ITreeIndexMetadataFrame metaFrame = frameFactory.createFrame();
metaNode.acquireWriteLatch();
try {
@@ -319,8 +319,8 @@
public long getFileOffset(ITreeIndexMetadataFrame frame, IValueReference key) throws HyracksDataException {
int metadataPageNum = getMetadataPageId();
if (metadataPageNum != IBufferCache.INVALID_PAGEID) {
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, getMetadataPageId()),
- false);
+ ICachedPage metaNode =
+ bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, getMetadataPageId()), false);
metaNode.acquireReadLatch();
try {
frame.setPage(metaNode);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallback.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallback.java
index 8245338..15aba57 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallback.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallback.java
@@ -27,7 +27,7 @@
/**
* Dummy operation callback that simply does nothing.
*/
-public enum NoOpOperationCallback implements IModificationOperationCallback,ISearchOperationCallback {
+public enum NoOpOperationCallback implements IModificationOperationCallback, ISearchOperationCallback {
INSTANCE;
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallbackFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallbackFactory.java
index 4ef89d1..925642e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallbackFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/impls/NoOpOperationCallbackFactory.java
@@ -36,7 +36,8 @@
INSTANCE;
@Override
- public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx, IOperatorNodePushable operatorNodePushable) {
+ public ISearchOperationCallback createSearchOperationCallback(long resourceId, IHyracksTaskContext ctx,
+ IOperatorNodePushable operatorNodePushable) {
return NoOpOperationCallback.INSTANCE;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleMode.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleMode.java
index ce2aa38..b192de7 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleMode.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleMode.java
@@ -20,5 +20,8 @@
package org.apache.hyracks.storage.am.common.ophelpers;
public enum FindTupleMode {
- INCLUSIVE, EXCLUSIVE, EXCLUSIVE_ERROR_IF_EXISTS, EXACT
+ INCLUSIVE,
+ EXCLUSIVE,
+ EXCLUSIVE_ERROR_IF_EXISTS,
+ EXACT
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
index e9dfaff..2081122 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
@@ -20,5 +20,7 @@
package org.apache.hyracks.storage.am.common.ophelpers;
public enum FindTupleNoExactMatchPolicy {
- LOWER_KEY, HIGHER_KEY, NONE
+ LOWER_KEY,
+ HIGHER_KEY,
+ NONE
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/tuples/SimpleTupleReference.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/tuples/SimpleTupleReference.java
index 609c51a..e82b037 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/tuples/SimpleTupleReference.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/tuples/SimpleTupleReference.java
@@ -98,7 +98,7 @@
@Override
public int getTupleSize() {
- return nullFlagsBytes + fieldSlotsBytes + ShortPointable.getShort(buf, tupleStartOff + nullFlagsBytes
- + (fieldCount - 1) * 2);
+ return nullFlagsBytes + fieldSlotsBytes
+ + ShortPointable.getShort(buf, tupleStartOff + nullFlagsBytes + (fieldCount - 1) * 2);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexStats.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexStats.java
index 4ee9f53..ad0b030 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexStats.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexStats.java
@@ -75,41 +75,30 @@
strBuilder.append("TREE LEVELS: " + treeLevels + "\n");
strBuilder.append("FREE PAGES : " + freePages + "\n");
strBuilder.append("META PAGES : " + metaPages + "\n");
- long totalPages = interiorStats.getNumPages() + leafStats.getNumPages()
- + freePages + metaPages;
+ long totalPages = interiorStats.getNumPages() + leafStats.getNumPages() + freePages + metaPages;
strBuilder.append("TOTAL PAGES : " + totalPages + "\n");
strBuilder.append("\n");
strBuilder.append("ROOT STATS" + "\n");
- strBuilder
- .append("NUM TUPLES: " + rootStats.getNumTuples() + "\n");
- strBuilder.append("FILL FACTOR : "
- + df.format(rootStats.getAvgFillFactor()) + "\n");
+ strBuilder.append("NUM TUPLES: " + rootStats.getNumTuples() + "\n");
+ strBuilder.append("FILL FACTOR : " + df.format(rootStats.getAvgFillFactor()) + "\n");
if (interiorStats.getNumPages() > 0) {
strBuilder.append("\n");
strBuilder.append("INTERIOR STATS" + "\n");
- strBuilder.append("NUM PAGES: " + interiorStats.getNumPages()
- + "\n");
- strBuilder.append("NUM TUPLES: "
- + interiorStats.getNumTuples() + "\n");
- strBuilder.append("AVG TUPLES/PAGE: "
- + df.format(interiorStats.getAvgNumTuples()) + "\n");
- strBuilder.append("AVG FILL FACTOR: "
- + df.format(interiorStats.getAvgFillFactor()) + "\n");
+ strBuilder.append("NUM PAGES: " + interiorStats.getNumPages() + "\n");
+ strBuilder.append("NUM TUPLES: " + interiorStats.getNumTuples() + "\n");
+ strBuilder.append("AVG TUPLES/PAGE: " + df.format(interiorStats.getAvgNumTuples()) + "\n");
+ strBuilder.append("AVG FILL FACTOR: " + df.format(interiorStats.getAvgFillFactor()) + "\n");
}
if (leafStats.getNumPages() > 0) {
strBuilder.append("\n");
strBuilder.append("LEAF STATS" + "\n");
- strBuilder.append("NUM PAGES: "
- + df.format(leafStats.getNumPages()) + "\n");
- strBuilder.append("NUM TUPLES: "
- + df.format(leafStats.getNumTuples()) + "\n");
- strBuilder.append("AVG TUPLES/PAGE: "
- + df.format(leafStats.getAvgNumTuples()) + "\n");
- strBuilder.append("AVG FILL FACTOR: "
- + df.format(leafStats.getAvgFillFactor()) + "\n");
+ strBuilder.append("NUM PAGES: " + df.format(leafStats.getNumPages()) + "\n");
+ strBuilder.append("NUM TUPLES: " + df.format(leafStats.getNumTuples()) + "\n");
+ strBuilder.append("AVG TUPLES/PAGE: " + df.format(leafStats.getAvgNumTuples()) + "\n");
+ strBuilder.append("AVG FILL FACTOR: " + df.format(leafStats.getAvgFillFactor()) + "\n");
}
return strBuilder.toString();
@@ -130,8 +119,7 @@
public void add(ITreeIndexFrame frame) {
numPages++;
numTuples += frame.getTupleCount();
- sumFillFactors += (double) (frame.getBuffer().capacity() - frame
- .getTotalFreeSpace())
+ sumFillFactors += (double) (frame.getBuffer().capacity() - frame.getTotalFreeSpace())
/ (double) frame.getBuffer().capacity();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexUtils.java
index 37ecfc0..ee31aaf 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/main/java/org/apache/hyracks/storage/am/common/util/TreeIndexUtils.java
@@ -27,7 +27,8 @@
@SuppressWarnings("rawtypes")
public class TreeIndexUtils {
- public static String printFrameTuples(ITreeIndexFrame frame, ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
+ public static String printFrameTuples(ITreeIndexFrame frame, ISerializerDeserializer[] fieldSerdes)
+ throws HyracksDataException {
StringBuilder strBuilder = new StringBuilder();
ITreeIndexTupleReference tuple = frame.createTupleReference();
for (int i = 0; i < frame.getTupleCount(); i++) {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/test/java/org/apache/hyracks/storage/am/common/frames/LIFOMetadataFrameTest.java b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/test/java/org/apache/hyracks/storage/am/common/frames/LIFOMetadataFrameTest.java
index fbb930d..0df558f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/test/java/org/apache/hyracks/storage/am/common/frames/LIFOMetadataFrameTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-common/src/test/java/org/apache/hyracks/storage/am/common/frames/LIFOMetadataFrameTest.java
@@ -41,8 +41,7 @@
Assert.assertNull(longPointable.getByteArray());
byte[] longBytes = new byte[Long.BYTES];
MutableArrayValueReference value = new MutableArrayValueReference(longBytes);
- int space = frame.getSpace() - (value.getLength() + Integer.BYTES * 2
- + testKey.getLength());
+ int space = frame.getSpace() - (value.getLength() + Integer.BYTES * 2 + testKey.getLength());
for (long l = 1L; l < 52L; l++) {
LongPointable.setLong(longBytes, 0, l);
frame.put(testKey, value);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
index 482ad38..0593ad5 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
@@ -364,8 +364,7 @@
public LSMBTreeOpContext createOpContext(IModificationOperationCallback modificationCallback,
ISearchOperationCallback searchCallback) {
int numBloomFilterKeyFields = hasBloomFilter
- ? ((LSMBTreeWithBloomFilterDiskComponentFactory) componentFactory).getBloomFilterKeyFields().length
- : 0;
+ ? ((LSMBTreeWithBloomFilterDiskComponentFactory) componentFactory).getBloomFilterKeyFields().length : 0;
return new LSMBTreeOpContext(this, memoryComponents, insertLeafFrameFactory, deleteLeafFrameFactory,
modificationCallback, searchCallback, numBloomFilterKeyFields, getTreeFields(), getFilterFields(),
getHarness(), getFilterCmpFactories(), tracer);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriter.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriter.java
index 44002ae..7c924b5 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-btree/src/main/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeCopyTupleWriter.java
@@ -32,7 +32,7 @@
public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
int tupleSize = bytesRequired(tuple);
byte[] buf = tuple.getFieldData(0);
- int tupleStartOff = ((LSMBTreeTupleReference)tuple).getTupleStart();
+ int tupleStartOff = ((LSMBTreeTupleReference) tuple).getTupleStart();
System.arraycopy(buf, tupleStartOff, targetBuf, targetOff, tupleSize);
return tupleSize;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/freepage/VirtualFreePageManagerFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/freepage/VirtualFreePageManagerFactory.java
index 0d58b85..2ca162d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/freepage/VirtualFreePageManagerFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/freepage/VirtualFreePageManagerFactory.java
@@ -24,6 +24,7 @@
public class VirtualFreePageManagerFactory implements IPageManagerFactory {
private static final long serialVersionUID = 1L;
+
@Override
public IPageManager createPageManager(IBufferCache bufferCache) {
return new VirtualFreePageManager(bufferCache);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/FilterBulkLoader.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/FilterBulkLoader.java
index 7359d2b..625f81e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/FilterBulkLoader.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/FilterBulkLoader.java
@@ -35,9 +35,8 @@
protected final PermutingTupleReference filterTuple;
protected final MultiComparator filterCmp;
- public FilterBulkLoader(ILSMComponentFilter filter, ITreeIndex treeIndex,
- ILSMComponentFilterManager filterManager, int[] indexFields, int[] filterFields,
- MultiComparator filterCmp) {
+ public FilterBulkLoader(ILSMComponentFilter filter, ITreeIndex treeIndex, ILSMComponentFilterManager filterManager,
+ int[] indexFields, int[] filterFields, MultiComparator filterCmp) {
this.filter = filter;
this.treeIndex = treeIndex;
this.filterManager = filterManager;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
index b2c48e1..fa3093c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-common/src/main/java/org/apache/hyracks/storage/am/lsm/common/impls/LSMHarness.java
@@ -298,9 +298,8 @@
if (!inactiveDiskComponents.isEmpty()) {
for (ILSMDiskComponent inactiveComp : inactiveDiskComponents) {
if (inactiveComp.getFileReferenceCount() == 1) {
- inactiveDiskComponentsToBeDeleted =
- inactiveDiskComponentsToBeDeleted == null ? new LinkedList<>()
- : inactiveDiskComponentsToBeDeleted;
+ inactiveDiskComponentsToBeDeleted = inactiveDiskComponentsToBeDeleted == null
+ ? new LinkedList<>() : inactiveDiskComponentsToBeDeleted;
inactiveDiskComponentsToBeDeleted.add(inactiveComp);
}
}
@@ -627,8 +626,8 @@
boolean failedOperation = false;
try {
newComponent = lsmIndex.merge(operation);
- operation.getCallback()
- .afterOperation(LSMIOOperationType.MERGE, ctx.getComponentHolder(), newComponent);
+ operation.getCallback().afterOperation(LSMIOOperationType.MERGE, ctx.getComponentHolder(),
+ newComponent);
newComponent.markAsValid(lsmIndex.isDurable());
} catch (Throwable e) { // NOSONAR: Log and re-throw
failedOperation = true;
@@ -808,8 +807,7 @@
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
if (LOGGER.isWarnEnabled()) {
- LOGGER.log(Level.WARN, "Ignoring interrupt while waiting for lagging merge on " + lsmIndex,
- e);
+ LOGGER.log(Level.WARN, "Ignoring interrupt while waiting for lagging merge on " + lsmIndex, e);
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexOpContext.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexOpContext.java
index bd82822..9cfaf7a 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexOpContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/inmemory/PartitionedInMemoryInvertedIndexOpContext.java
@@ -34,7 +34,7 @@
protected void setTokenizingTupleIterator() {
IBinaryTokenizer tokenizer = getTokenizerFactory().createTokenizer();
- setTupleIter(new PartitionedInvertedIndexTokenizingTupleIterator(tokenCmpFactories.length, btree.getFieldCount()
- - tokenCmpFactories.length, tokenizer));
+ setTupleIter(new PartitionedInvertedIndexTokenizingTupleIterator(tokenCmpFactories.length,
+ btree.getFieldCount() - tokenCmpFactories.length, tokenizer));
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListCursor.java
index 56520b8..94ce348 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/ondisk/FixedSizeElementInvertedListCursor.java
@@ -117,8 +117,8 @@
currentPageIx = binarySearch(elementIndexes, 0, numPages, elementIx);
if (currentPageIx < 0) {
- throw new IndexOutOfBoundsException("Requested index: " + elementIx + " from array with numElements: "
- + numElements);
+ throw new IndexOutOfBoundsException(
+ "Requested index: " + elementIx + " from array with numElements: " + numElements);
}
if (currentPageIx == 0) {
@@ -223,8 +223,8 @@
public String printCurrentElement(ISerializerDeserializer[] serdes) throws HyracksDataException {
StringBuilder strBuilder = new StringBuilder();
for (int i = 0; i < tuple.getFieldCount(); i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
DataInput dataIn = new DataInputStream(inStream);
Object o = serdes[i].deserialize(dataIn);
strBuilder.append(o.toString());
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ArrayListFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ArrayListFactory.java
index 84c453b..5846e25 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ArrayListFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ArrayListFactory.java
@@ -23,7 +23,7 @@
import org.apache.hyracks.storage.am.lsm.invertedindex.api.IObjectFactory;
-public class ArrayListFactory<T> implements IObjectFactory<ArrayList<T>>{
+public class ArrayListFactory<T> implements IObjectFactory<ArrayList<T>> {
@Override
public ArrayList<T> create() {
return new ArrayList<T>();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveEditDistanceSearchModifier.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveEditDistanceSearchModifier.java
index 6116322..decc499 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveEditDistanceSearchModifier.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/ConjunctiveEditDistanceSearchModifier.java
@@ -21,7 +21,6 @@
public class ConjunctiveEditDistanceSearchModifier extends EditDistanceSearchModifier {
-
public ConjunctiveEditDistanceSearchModifier(int gramLength, int edThresh) {
super(gramLength, edThresh);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
index 4269aa7..4c9f037 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/search/TOccurrenceSearcher.java
@@ -34,8 +34,7 @@
protected final ArrayList<IInvertedListCursor> invListCursors = new ArrayList<>();
- public TOccurrenceSearcher(IHyracksCommonContext ctx, IInPlaceInvertedIndex invIndex)
- throws HyracksDataException {
+ public TOccurrenceSearcher(IHyracksCommonContext ctx, IInPlaceInvertedIndex invIndex) throws HyracksDataException {
super(ctx, invIndex);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8Token.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8Token.java
index ccc2c81..ed2f3be 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8Token.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/AbstractUTF8Token.java
@@ -115,8 +115,7 @@
// The preChar and postChar are required to be a single byte utf8 char, e.g. ASCII char.
protected void serializeToken(UTF8StringBuilder builder, GrowableArray out, int numPreChars, int numPostChars,
- char preChar, char postChar)
- throws IOException {
+ char preChar, char postChar) throws IOException {
handleTokenTypeTag(out.getDataOutput());
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
index cd37ffa..3a5224c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
@@ -72,10 +72,11 @@
int tokenStart = tokensStart.get(i);
curTokenCount++; // assume we found it
int offset = 0;
- for (int charPos= 0; charPos < tokenLength; charPos++) {
+ for (int charPos = 0; charPos < tokenLength; charPos++) {
// case insensitive comparison
- if (Character.toLowerCase(UTF8StringUtil.charAt(sentenceBytes, currentTokenStart + offset))
- != Character.toLowerCase(UTF8StringUtil.charAt(sentenceBytes, tokenStart + offset))) {
+ if (Character.toLowerCase(
+ UTF8StringUtil.charAt(sentenceBytes, currentTokenStart + offset)) != Character
+ .toLowerCase(UTF8StringUtil.charAt(sentenceBytes, tokenStart + offset))) {
curTokenCount--;
break;
}
@@ -93,7 +94,6 @@
tokenCount++;
}
-
// TODO Why we bother to get the tokenCount in advance? It seems a caller's problem.
@Override
public short getTokensCount() {
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
index 81254fc..e583c7d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
@@ -19,16 +19,15 @@
package org.apache.hyracks.storage.am.lsm.invertedindex.tokenizers;
-public class DelimitedUTF8StringBinaryTokenizerFactory implements
- IBinaryTokenizerFactory {
+public class DelimitedUTF8StringBinaryTokenizerFactory implements IBinaryTokenizerFactory {
private static final long serialVersionUID = 1L;
private final boolean ignoreTokenCount;
private final boolean sourceHasTypeTag;
private final ITokenFactory tokenFactory;
- public DelimitedUTF8StringBinaryTokenizerFactory(boolean ignoreTokenCount,
- boolean sourceHasTypeTag, ITokenFactory tokenFactory) {
+ public DelimitedUTF8StringBinaryTokenizerFactory(boolean ignoreTokenCount, boolean sourceHasTypeTag,
+ ITokenFactory tokenFactory) {
this.ignoreTokenCount = ignoreTokenCount;
this.sourceHasTypeTag = sourceHasTypeTag;
this.tokenFactory = tokenFactory;
@@ -36,7 +35,6 @@
@Override
public IBinaryTokenizer createTokenizer() {
- return new DelimitedUTF8StringBinaryTokenizer(ignoreTokenCount,
- sourceHasTypeTag, tokenFactory);
+ return new DelimitedUTF8StringBinaryTokenizer(ignoreTokenCount, sourceHasTypeTag, tokenFactory);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java
index 8bd0c50..711a82f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java
@@ -79,8 +79,9 @@
tokenCount++; // assume found
int offset = 0;
for (int j = 0; j < gramLength; j++) {
- if (Character.toLowerCase(UTF8StringUtil.charAt(sentenceBytes, currentTokenStart + offset))
- != Character.toLowerCase(UTF8StringUtil.charAt(sentenceBytes, tmpIndex + offset))) {
+ if (Character
+ .toLowerCase(UTF8StringUtil.charAt(sentenceBytes, currentTokenStart + offset)) != Character
+ .toLowerCase(UTF8StringUtil.charAt(sentenceBytes, tmpIndex + offset))) {
tokenCount--;
break;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexTokenizingTupleIterator.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexTokenizingTupleIterator.java
index 2ade6db..78c8e4d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexTokenizingTupleIterator.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-invertedindex/src/main/java/org/apache/hyracks/storage/am/lsm/invertedindex/util/InvertedIndexTokenizingTupleIterator.java
@@ -39,7 +39,8 @@
protected final IBinaryTokenizer tokenizer;
protected ITupleReference inputTuple;
- public InvertedIndexTokenizingTupleIterator(int tokensFieldCount, int invListFieldCount, IBinaryTokenizer tokenizer) {
+ public InvertedIndexTokenizingTupleIterator(int tokensFieldCount, int invListFieldCount,
+ IBinaryTokenizer tokenizer) {
this.invListFieldCount = invListFieldCount;
this.tupleBuilder = new ArrayTupleBuilder(tokensFieldCount + invListFieldCount);
this.tupleReference = new ArrayTupleReference();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeAbstractCursor.java b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeAbstractCursor.java
index a477baf..e4267e2 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeAbstractCursor.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-lsm-rtree/src/main/java/org/apache/hyracks/storage/am/lsm/rtree/impls/LSMRTreeAbstractCursor.java
@@ -175,7 +175,6 @@
open = false;
}
-
@Override
public ITupleReference getTuple() {
return frameTuple;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IGenericPrimitiveSerializerDeserializer.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IGenericPrimitiveSerializerDeserializer.java
index 1825003..1ca75cb 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IGenericPrimitiveSerializerDeserializer.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/api/IGenericPrimitiveSerializerDeserializer.java
@@ -21,7 +21,6 @@
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
-public interface IGenericPrimitiveSerializerDeserializer<T> extends
- ISerializerDeserializer<T> {
+public interface IGenericPrimitiveSerializerDeserializer<T> extends ISerializerDeserializer<T> {
public double getValue(byte[] bytes, int offset);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RStarTreePolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RStarTreePolicy.java
index 7ae616a..1681eee 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RStarTreePolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RStarTreePolicy.java
@@ -71,10 +71,12 @@
@Override
public void split(ITreeIndexFrame leftFrame, ByteBuffer buf, ITreeIndexFrame rightFrame, ISlotManager slotManager,
- ITreeIndexTupleReference frameTuple, ITupleReference tuple, ISplitKey splitKey) throws HyracksDataException {
+ ITreeIndexTupleReference frameTuple, ITupleReference tuple, ISplitKey splitKey)
+ throws HyracksDataException {
RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
RTreeTypeAwareTupleWriter rTreeTupleWriterleftRTreeFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
- RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
+ RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame =
+ ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
RTreeNSMFrame leftRTreeFrame = ((RTreeNSMFrame) leftFrame);
@@ -92,10 +94,10 @@
for (int k = 0; k < leftRTreeFrame.getTupleCount(); ++k) {
frameTuple.resetByTupleIndex(leftRTreeFrame, k);
- double LowerKey = keyValueProviders[i]
- .getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
- double UpperKey = keyValueProviders[j]
- .getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
+ double LowerKey =
+ keyValueProviders[i].getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
+ double UpperKey =
+ keyValueProviders[j].getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
tupleEntries1.add(k, LowerKey);
tupleEntries2.add(k, UpperKey);
@@ -186,8 +188,8 @@
if (tupleEntries1.get(i).getTupleIndex() != -1) {
frameTuple.resetByTupleIndex(leftRTreeFrame, tupleEntries1.get(i).getTupleIndex());
rightFrame.insert(frameTuple, -1);
- ((UnorderedSlotManager) slotManager).modifySlot(
- slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
+ ((UnorderedSlotManager) slotManager)
+ .modifySlot(slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
totalBytes += leftRTreeFrame.getTupleSize(frameTuple);
numOfDeletedTuples++;
} else {
@@ -198,8 +200,8 @@
((UnorderedSlotManager) slotManager).deleteEmptySlots();
// maintain space information
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
- + (slotManager.getSlotSize() * numOfDeletedTuples));
+ buf.putInt(totalFreeSpaceOff,
+ buf.getInt(totalFreeSpaceOff) + totalBytes + (slotManager.getSlotSize() * numOfDeletedTuples));
// compact both pages
rightFrame.compact();
@@ -238,8 +240,8 @@
tupleEntries2.clear();
}
- public void generateDist(ITreeIndexFrame leftRTreeFrame, ITreeIndexTupleReference frameTuple,
- ITupleReference tuple, TupleEntryArrayList entries, Rectangle rec, int start, int end) {
+ public void generateDist(ITreeIndexFrame leftRTreeFrame, ITreeIndexTupleReference frameTuple, ITupleReference tuple,
+ TupleEntryArrayList entries, Rectangle rec, int start, int end) {
int j = 0;
while (entries.get(j).getTupleIndex() == -1) {
j++;
@@ -302,20 +304,19 @@
int c = ((RTreeNSMInteriorFrame) frame).pointerCmp(frameTuple, cmpFrameTuple, cmp);
if (c != 0) {
- double intersection = RTreeComputationUtils.overlappedArea(frameTuple, tuple,
- cmpFrameTuple, cmp, keyValueProviders);
+ double intersection = RTreeComputationUtils.overlappedArea(frameTuple, tuple, cmpFrameTuple,
+ cmp, keyValueProviders);
if (intersection != 0.0) {
- difference += intersection
- - RTreeComputationUtils.overlappedArea(frameTuple, null, cmpFrameTuple, cmp,
- keyValueProviders);
+ difference += intersection - RTreeComputationUtils.overlappedArea(frameTuple, null,
+ cmpFrameTuple, cmp, keyValueProviders);
}
} else {
id = j;
}
}
- double enlargedArea = RTreeComputationUtils.enlargedArea(cmpFrameTuple, tuple, cmp,
- keyValueProviders);
+ double enlargedArea =
+ RTreeComputationUtils.enlargedArea(cmpFrameTuple, tuple, cmp, keyValueProviders);
if (difference < minOverlap) {
minOverlap = difference;
minEnlargedArea = enlargedArea;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeComputationUtils.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeComputationUtils.java
index 31ade3c..281dff5 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeComputationUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeComputationUtils.java
@@ -71,8 +71,8 @@
double pHigh1, pLow1;
if (tupleToBeInserted != null) {
int c = cmp.getComparators()[i].compare(tuple1.getFieldData(i), tuple1.getFieldStart(i),
- tuple1.getFieldLength(i), tupleToBeInserted.getFieldData(i),
- tupleToBeInserted.getFieldStart(i), tupleToBeInserted.getFieldLength(i));
+ tuple1.getFieldLength(i), tupleToBeInserted.getFieldData(i), tupleToBeInserted.getFieldStart(i),
+ tupleToBeInserted.getFieldLength(i));
if (c < 0) {
pLow1 = keyValueProviders[i].getValue(tuple1.getFieldData(i), tuple1.getFieldStart(i));
} else {
@@ -81,8 +81,8 @@
}
c = cmp.getComparators()[j].compare(tuple1.getFieldData(j), tuple1.getFieldStart(j),
- tuple1.getFieldLength(j), tupleToBeInserted.getFieldData(j),
- tupleToBeInserted.getFieldStart(j), tupleToBeInserted.getFieldLength(j));
+ tuple1.getFieldLength(j), tupleToBeInserted.getFieldData(j), tupleToBeInserted.getFieldStart(j),
+ tupleToBeInserted.getFieldLength(j));
if (c > 0) {
pHigh1 = keyValueProviders[j].getValue(tuple1.getFieldData(j), tuple1.getFieldStart(j));
} else {
@@ -124,16 +124,16 @@
int maxFieldPos = cmp.getKeyFieldCount() / 2;
for (int i = 0; i < maxFieldPos; i++) {
int j = maxFieldPos + i;
- int c = cmp.getComparators()[i]
- .compare(tuple1.getFieldData(i), tuple1.getFieldStart(i), tuple1.getFieldLength(i),
- tuple2.getFieldData(i), tuple2.getFieldStart(i), tuple2.getFieldLength(i));
+ int c = cmp.getComparators()[i].compare(tuple1.getFieldData(i), tuple1.getFieldStart(i),
+ tuple1.getFieldLength(i), tuple2.getFieldData(i), tuple2.getFieldStart(i),
+ tuple2.getFieldLength(i));
if (c > 0) {
return false;
}
- c = cmp.getComparators()[j]
- .compare(tuple1.getFieldData(j), tuple1.getFieldStart(j), tuple1.getFieldLength(j),
- tuple2.getFieldData(j), tuple2.getFieldStart(j), tuple2.getFieldLength(j));
+ c = cmp.getComparators()[j].compare(tuple1.getFieldData(j), tuple1.getFieldStart(j),
+ tuple1.getFieldLength(j), tuple2.getFieldData(j), tuple2.getFieldStart(j),
+ tuple2.getFieldLength(j));
if (c < 0) {
return false;
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
index ba542ea..05d04f6 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
@@ -115,8 +115,7 @@
@Override
public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey,
- IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache)
- throws HyracksDataException {
+ IExtraPageBlockHelper extraPageBlockHelper, IBufferCache bufferCache) throws HyracksDataException {
rtreePolicy.split(this, buf, rightFrame, slotManager, frameTuple, tuple, splitKey);
}
@@ -166,15 +165,12 @@
@Override
public String toString() {
- return new StringBuilder(this.getClass().getSimpleName()).append('\n').append(
- "Tuple Count: " + getTupleCount()).append('\n').append("Free Space offset: " + buf
- .getInt(Constants.FREE_SPACE_OFFSET)).append('\n').append("Level: " + buf
- .get(Constants.LEVEL_OFFSET)).append('\n').append("LSN: "
- + buf.getLong(PAGE_LSN_OFFSET)).append('\n').append(
- "Total Free Space: " + buf.getInt(TOTAL_FREE_SPACE_OFFSET)).append(
- '\n').append("Flag: " + buf.get(
- FLAG_OFFSET)).append('\n')
- .append("NSN: " + buf.getLong(PAGE_NSN_OFFSET)).append('\n').append("Right Page:")
- .append(buf.getInt(RIGHT_PAGE_OFFSET)).toString();
+ return new StringBuilder(this.getClass().getSimpleName()).append('\n').append("Tuple Count: " + getTupleCount())
+ .append('\n').append("Free Space offset: " + buf.getInt(Constants.FREE_SPACE_OFFSET)).append('\n')
+ .append("Level: " + buf.get(Constants.LEVEL_OFFSET)).append('\n')
+ .append("LSN: " + buf.getLong(PAGE_LSN_OFFSET)).append('\n')
+ .append("Total Free Space: " + buf.getInt(TOTAL_FREE_SPACE_OFFSET)).append('\n')
+ .append("Flag: " + buf.get(FLAG_OFFSET)).append('\n').append("NSN: " + buf.getLong(PAGE_NSN_OFFSET))
+ .append('\n').append("Right Page:").append(buf.getInt(RIGHT_PAGE_OFFSET)).toString();
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
index b8b5a8c..5550e1f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
@@ -38,8 +38,8 @@
for (int i = 0; i < keyValueProviders.length; i++) {
keyValueProviders[i] = keyValueProviderFactories[i].createPrimitiveValueProvider();
}
- return new RTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders,
- rtreePolicyType, isPointMBR);
+ return new RTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders, rtreePolicyType,
+ isPointMBR);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
index e112b86..16b22c0 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
@@ -38,8 +38,8 @@
for (int i = 0; i < keyValueProviders.length; i++) {
keyValueProviders[i] = keyValueProviderFactories[i].createPrimitiveValueProvider();
}
- return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders,
- rtreePolicyType, isPointMBR);
+ return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders, rtreePolicyType,
+ isPointMBR);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicy.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicy.java
index 0dea4c2..623ef21 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicy.java
@@ -62,10 +62,12 @@
@Override
public void split(ITreeIndexFrame leftFrame, ByteBuffer buf, ITreeIndexFrame rightFrame, ISlotManager slotManager,
- ITreeIndexTupleReference frameTuple, ITupleReference tuple, ISplitKey splitKey) throws HyracksDataException {
+ ITreeIndexTupleReference frameTuple, ITupleReference tuple, ISplitKey splitKey)
+ throws HyracksDataException {
RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
- RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
+ RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame =
+ ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
RTreeNSMFrame leftRTreeFrame = ((RTreeNSMFrame) leftFrame);
@@ -75,10 +77,10 @@
for (int i = 0; i < maxFieldPos; i++) {
int j = maxFieldPos + i;
frameTuple.resetByTupleIndex(leftRTreeFrame, 0);
- double leastLowerValue = keyValueProviders[i].getValue(frameTuple.getFieldData(i),
- frameTuple.getFieldStart(i));
- double greatestUpperValue = keyValueProviders[j].getValue(frameTuple.getFieldData(j),
- frameTuple.getFieldStart(j));
+ double leastLowerValue =
+ keyValueProviders[i].getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
+ double greatestUpperValue =
+ keyValueProviders[j].getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
double leastUpperValue = leastLowerValue;
double greatestLowerValue = greatestUpperValue;
int leastUpperIndex = 0;
@@ -88,16 +90,16 @@
int tupleCount = leftRTreeFrame.getTupleCount();
for (int k = 1; k < tupleCount; ++k) {
frameTuple.resetByTupleIndex(leftRTreeFrame, k);
- double lowerValue = keyValueProviders[i].getValue(frameTuple.getFieldData(i),
- frameTuple.getFieldStart(i));
+ double lowerValue =
+ keyValueProviders[i].getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
if (lowerValue > greatestLowerValue) {
greatestLowerIndex = k;
cmpFrameTuple.resetByTupleIndex(leftRTreeFrame, k);
greatestLowerValue = keyValueProviders[i].getValue(cmpFrameTuple.getFieldData(i),
cmpFrameTuple.getFieldStart(i));
}
- double higherValue = keyValueProviders[j].getValue(frameTuple.getFieldData(j),
- frameTuple.getFieldStart(j));
+ double higherValue =
+ keyValueProviders[j].getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
if (higherValue < leastUpperValue) {
leastUpperIndex = k;
cmpFrameTuple.resetByTupleIndex(leftRTreeFrame, k);
@@ -169,8 +171,8 @@
((UnorderedSlotManager) slotManager).deleteEmptySlots();
// maintain space information
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
- + (slotManager.getSlotSize() * numOfDeletedTuples));
+ buf.putInt(totalFreeSpaceOff,
+ buf.getInt(totalFreeSpaceOff) + totalBytes + (slotManager.getSlotSize() * numOfDeletedTuples));
// compact both pages
rightFrame.compact();
@@ -196,7 +198,8 @@
splitKey.initData(splitKeySize);
leftRTreeFrame.adjustMBR();
- rTreeTupleWriterLeftFrame.writeTupleFields(leftRTreeFrame.getMBRTuples(), 0, rTreeSplitKey.getLeftPageBuffer(), 0);
+ rTreeTupleWriterLeftFrame.writeTupleFields(leftRTreeFrame.getMBRTuples(), 0, rTreeSplitKey.getLeftPageBuffer(),
+ 0);
rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer().array(), 0);
((IRTreeFrame) rightFrame).adjustMBR();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicyType.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicyType.java
index 8ca9842..d9dbd81 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicyType.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/frames/RTreePolicyType.java
@@ -20,5 +20,6 @@
package org.apache.hyracks.storage.am.rtree.frames;
public enum RTreePolicyType {
- RTREE, RSTARTREE
+ RTREE,
+ RSTARTREE
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/EntriesOrder.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/EntriesOrder.java
index d6d69bb..8798241 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/EntriesOrder.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/EntriesOrder.java
@@ -20,5 +20,6 @@
package org.apache.hyracks.storage.am.rtree.impls;
public enum EntriesOrder {
- ASCENDING, DESCENDING
+ ASCENDING,
+ DESCENDING
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTreeSplitKey.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
index f6bdcfb..11a5b2b 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
@@ -35,8 +35,7 @@
public int keySize = 0;
- public RTreeSplitKey(ITreeIndexTupleReference leftTuple,
- ITreeIndexTupleReference rightTuple) {
+ public RTreeSplitKey(ITreeIndexTupleReference leftTuple, ITreeIndexTupleReference rightTuple) {
this.leftTuple = leftTuple;
this.rightTuple = rightTuple;
}
@@ -114,8 +113,7 @@
rightPageBuf.putInt(keySize, page);
}
- public ISplitKey duplicate(ITreeIndexTupleReference copyLeftTuple,
- ITreeIndexTupleReference copyRightTuple) {
+ public ISplitKey duplicate(ITreeIndexTupleReference copyLeftTuple, ITreeIndexTupleReference copyRightTuple) {
RTreeSplitKey copy = new RTreeSplitKey(copyLeftTuple, copyRightTuple);
copy.leftPageData = leftPageData.clone();
copy.leftPageBuf = ByteBuffer.wrap(copy.leftPageData);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/Rectangle.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/Rectangle.java
index cbfd245..c74f712 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/Rectangle.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/impls/Rectangle.java
@@ -64,13 +64,13 @@
public void enlarge(ITupleReference tupleToBeInserted, IPrimitiveValueProvider[] valueProviders) {
for (int i = 0; i < getDim(); i++) {
int j = getDim() + i;
- double low = valueProviders[i].getValue(tupleToBeInserted.getFieldData(i),
- tupleToBeInserted.getFieldStart(i));
+ double low =
+ valueProviders[i].getValue(tupleToBeInserted.getFieldData(i), tupleToBeInserted.getFieldStart(i));
if (getLow(i) > low) {
setLow(i, low);
}
- double high = valueProviders[j].getValue(tupleToBeInserted.getFieldData(j),
- tupleToBeInserted.getFieldStart(j));
+ double high =
+ valueProviders[j].getValue(tupleToBeInserted.getFieldData(j), tupleToBeInserted.getFieldStart(j));
if (getHigh(i) < high) {
setHigh(i, high);
}
@@ -84,8 +84,8 @@
for (int i = 0; i < getDim(); i++) {
int j = getDim() + i;
- double low = valueProviders[i].getValue(tupleToBeInserted.getFieldData(i),
- tupleToBeInserted.getFieldStart(i));
+ double low =
+ valueProviders[i].getValue(tupleToBeInserted.getFieldData(i), tupleToBeInserted.getFieldStart(i));
double lowAfterEnlargement;
if (getLow(i) > low) {
lowAfterEnlargement = low;
@@ -93,8 +93,8 @@
lowAfterEnlargement = getLow(i);
}
- double high = valueProviders[j].getValue(tupleToBeInserted.getFieldData(j),
- tupleToBeInserted.getFieldStart(j));
+ double high =
+ valueProviders[j].getValue(tupleToBeInserted.getFieldData(j), tupleToBeInserted.getFieldStart(j));
double highAfterEnlargement;
if (getHigh(i) < high) {
highAfterEnlargement = high;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/HilbertDoubleComparator.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/HilbertDoubleComparator.java
index 3194674..447940f 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/HilbertDoubleComparator.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/HilbertDoubleComparator.java
@@ -54,8 +54,8 @@
private IntArrayList stateStack = new IntArrayList(1000, 200);
private DoubleArrayList boundsStack = new DoubleArrayList(2000, 400);
- private IPrimitiveValueProvider valueProvider = DoublePrimitiveValueProviderFactory.INSTANCE
- .createPrimitiveValueProvider();
+ private IPrimitiveValueProvider valueProvider =
+ DoublePrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
private double[] a;
private double[] b;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparator.java b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparator.java
index 30eb991..13aed8c 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparator.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-am-rtree/src/main/java/org/apache/hyracks/storage/am/rtree/linearize/ZCurveDoubleComparator.java
@@ -36,8 +36,8 @@
private double stepsize;
private DoubleArrayList boundsStack = new DoubleArrayList(2000, 400);
- private IPrimitiveValueProvider valueProvider = DoublePrimitiveValueProviderFactory.INSTANCE
- .createPrimitiveValueProvider();
+ private IPrimitiveValueProvider valueProvider =
+ DoublePrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
private double[] a;
private double[] b;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/AsyncFIFOPageQueueManager.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/AsyncFIFOPageQueueManager.java
index b4f364c..dbead1e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/AsyncFIFOPageQueueManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/AsyncFIFOPageQueueManager.java
@@ -34,7 +34,7 @@
protected BufferCache bufferCache;
volatile protected PageQueue pageQueue;
- public AsyncFIFOPageQueueManager(BufferCache bufferCache){
+ public AsyncFIFOPageQueueManager(BufferCache bufferCache) {
this.bufferCache = bufferCache;
}
@@ -43,7 +43,8 @@
public final IFIFOPageWriter writer;
protected PageQueue(IBufferCache bufferCache, IFIFOPageWriter writer) {
- if(DEBUG) System.out.println("[FIFO] New Queue");
+ if (DEBUG)
+ System.out.println("[FIFO] New Queue");
this.bufferCache = bufferCache;
this.writer = writer;
}
@@ -59,10 +60,9 @@
@Override
public void put(ICachedPage page) throws HyracksDataException {
try {
- if(!poisoned.get()) {
+ if (!poisoned.get()) {
queue.put(page);
- }
- else{
+ } else {
throw new HyracksDataException("Queue is closing");
}
} catch (InterruptedException e) {
@@ -72,22 +72,21 @@
}
}
-
public PageQueue createQueue(IFIFOPageWriter writer) {
if (pageQueue == null) {
- synchronized(this){
+ synchronized (this) {
if (pageQueue == null) {
writerThread = new Thread(this);
writerThread.setName("FIFO Writer Thread");
writerThread.start();
- pageQueue = new PageQueue(bufferCache,writer);
+ pageQueue = new PageQueue(bufferCache, writer);
}
}
}
return pageQueue;
}
- public void destroyQueue(){
+ public void destroyQueue() {
poisoned.set(true);
if (writerThread == null) {
synchronized (this) {
@@ -99,16 +98,16 @@
//Dummy cached page to act as poison pill
CachedPage poisonPill = new CachedPage();
- poisonPill.setQueueInfo(new QueueInfo(true,true));
+ poisonPill.setQueueInfo(new QueueInfo(true, true));
- try{
+ try {
synchronized (poisonPill) {
queue.put(poisonPill);
- while(queue.contains(poisonPill)){
+ while (queue.contains(poisonPill)) {
poisonPill.wait();
}
}
- } catch (InterruptedException e){
+ } catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
@@ -139,26 +138,30 @@
@Override
public void run() {
- if (DEBUG) System.out.println("[FIFO] Writer started");
+ if (DEBUG)
+ System.out.println("[FIFO] Writer started");
boolean die = false;
while (!die) {
ICachedPage entry;
try {
entry = queue.take();
- } catch(InterruptedException e) {
+ } catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
}
- if (entry.getQueueInfo() != null && entry.getQueueInfo().hasWaiters()){
- synchronized(entry) {
- if(entry.getQueueInfo().isPoison()) { die = true; }
+ if (entry.getQueueInfo() != null && entry.getQueueInfo().hasWaiters()) {
+ synchronized (entry) {
+ if (entry.getQueueInfo().isPoison()) {
+ die = true;
+ }
entry.notifyAll();
continue;
}
}
- if (DEBUG) System.out.println("[FIFO] Write " + BufferedFileHandle.getFileId(((CachedPage)entry).dpid)+","
- + BufferedFileHandle.getPageId(((CachedPage)entry).dpid));
+ if (DEBUG)
+ System.out.println("[FIFO] Write " + BufferedFileHandle.getFileId(((CachedPage) entry).dpid) + ","
+ + BufferedFileHandle.getPageId(((CachedPage) entry).dpid));
try {
pageQueue.getWriter().write(entry, bufferCache);
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
index 302c7b2..1443bbc 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/BufferCache.java
@@ -1307,21 +1307,17 @@
finishQueue();
if (cycleCount > MAX_PIN_ATTEMPT_CYCLES) {
cycleCount = 0; // suppress warning below
- throw new HyracksDataException(
- "Unable to find free page in buffer cache after " + MAX_PIN_ATTEMPT_CYCLES
- + " cycles (buffer cache undersized?)" + (DEBUG
- ? " ; " + (masterPinCount.get() - startingPinCount)
- + " successful pins since start of cycle"
- : ""));
+ throw new HyracksDataException("Unable to find free page in buffer cache after "
+ + MAX_PIN_ATTEMPT_CYCLES + " cycles (buffer cache undersized?)"
+ + (DEBUG ? " ; " + (masterPinCount.get() - startingPinCount)
+ + " successful pins since start of cycle" : ""));
}
}
} finally {
if (cycleCount > PIN_ATTEMPT_CYCLES_WARNING_THRESHOLD && LOGGER.isWarnEnabled()) {
LOGGER.warn("Took " + cycleCount + " cycles to find free page in buffer cache. (buffer cache "
- + "undersized?)" + (DEBUG
- ? " ; " + (masterPinCount.get() - startingPinCount)
- + " successful pins since start of cycle"
- : ""));
+ + "undersized?)" + (DEBUG ? " ; " + (masterPinCount.get() - startingPinCount)
+ + " successful pins since start of cycle" : ""));
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
index a6a3bc8..87a15d3 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/ClockPageReplacementStrategy.java
@@ -170,7 +170,7 @@
return;
}
final int newSize = pageSize * multiplier;
- ByteBuffer oldBuffer = ((CachedPage)cPage).buffer;
+ ByteBuffer oldBuffer = ((CachedPage) cPage).buffer;
oldBuffer.position(0);
final int delta = multiplier - origMultiplier;
if (multiplier < origMultiplier) {
@@ -194,8 +194,7 @@
}
@Override
- public void fixupCapacityOnLargeRead(ICachedPageInternal cPage)
- throws HyracksDataException {
+ public void fixupCapacityOnLargeRead(ICachedPageInternal cPage) throws HyracksDataException {
ByteBuffer oldBuffer = ((CachedPage) cPage).buffer;
final int multiplier = cPage.getFrameSizeMultiplier();
final int newSize = pageSize * multiplier;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IFIFOPageWriter.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IFIFOPageWriter.java
index 7380261..567c01e 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IFIFOPageWriter.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IFIFOPageWriter.java
@@ -17,7 +17,6 @@
import org.apache.hyracks.api.exceptions.HyracksDataException;
-
public interface IFIFOPageWriter {
public void write(ICachedPage page, BufferCache bufferCache) throws HyracksDataException;
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IPageReplacementStrategy.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IPageReplacementStrategy.java
index d3bcce5..bbf3b45 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IPageReplacementStrategy.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/IPageReplacementStrategy.java
@@ -39,8 +39,7 @@
public int getNumPages();
- void fixupCapacityOnLargeRead(ICachedPageInternal cPage)
- throws HyracksDataException;
+ void fixupCapacityOnLargeRead(ICachedPageInternal cPage) throws HyracksDataException;
public int getPageSize();
diff --git a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/QueueInfo.java b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/QueueInfo.java
index bc69bc8..d86319d 100644
--- a/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/QueueInfo.java
+++ b/hyracks-fullstack/hyracks/hyracks-storage-common/src/main/java/org/apache/hyracks/storage/common/buffercache/QueueInfo.java
@@ -18,24 +18,24 @@
*/
package org.apache.hyracks.storage.common.buffercache;
-public class QueueInfo implements IQueueInfo{
+public class QueueInfo implements IQueueInfo {
private final boolean poison;
private final boolean waiters;
- public QueueInfo(boolean waiters, boolean poison){
+ public QueueInfo(boolean waiters, boolean poison) {
this.waiters = waiters;
this.poison = poison;
}
@Override
- public boolean hasWaiters(){
+ public boolean hasWaiters() {
return waiters;
}
@Override
- public boolean isPoison(){
+ public boolean isPoison() {
return poison;
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java
index cd6ea2e..9e863eb 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java
@@ -39,8 +39,8 @@
@Override
protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
- ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
- throws Exception {
+ ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey,
+ ITupleReference prefixHighKey) throws Exception {
OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType, false);
ctx.getIndex().create();
ctx.getIndex().activate();
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexDeleteTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexDeleteTest.java
index 7378cf1..b7cf4a0 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexDeleteTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexDeleteTest.java
@@ -54,8 +54,8 @@
} else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, false, getRandom());
}
- int numTuplesPerDeleteRound = (int) Math
- .ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
+ int numTuplesPerDeleteRound =
+ (int) Math.ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
for (int j = 0; j < numDeleteRounds; j++) {
orderedIndexTestUtils.deleteTuples(ctx, numTuplesPerDeleteRound, getRandom());
orderedIndexTestUtils.checkPointSearches(ctx);
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexSortedInsertTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexSortedInsertTest.java
index 9a08401..8aa3f14 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexSortedInsertTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexSortedInsertTest.java
@@ -45,8 +45,8 @@
@Override
protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
- ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
- throws Exception {
+ ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey,
+ ITupleReference prefixHighKey) throws Exception {
OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType, false);
ctx.getIndex().create();
ctx.getIndex().activate();
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java
index 3dac0db..c43d41f 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestDriver.java
@@ -62,8 +62,8 @@
LOGGER.info("BTree " + getTestOpName() + " Test With One Int Key And Value.");
}
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
// Range search in [-1000, 1000]
ITupleReference lowKey = TupleUtils.createIntegerTuple(-1000);
ITupleReference highKey = TupleUtils.createIntegerTuple(1000);
@@ -79,8 +79,8 @@
LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys.");
}
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
// Range search in [50 0, 50 500]
ITupleReference lowKey = TupleUtils.createIntegerTuple(50, 0);
@@ -101,9 +101,9 @@
LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys And Values.");
}
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
// Range search in [50 100, 100 100]
ITupleReference lowKey = TupleUtils.createIntegerTuple(-100, -100);
@@ -124,8 +124,8 @@
LOGGER.info("BTree " + getTestOpName() + " Test With One String Key And Value.");
}
- ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() };
+ ISerializerDeserializer[] fieldSerdes =
+ { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
// Range search in ["cbf", cc7"]
ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
@@ -142,8 +142,8 @@
LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys.");
}
- ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() };
+ ISerializerDeserializer[] fieldSerdes =
+ { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
// Range search in ["cbf", "ddd", cc7", "eee"]
ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
@@ -164,9 +164,9 @@
LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys And Values.");
}
- ISerializerDeserializer[] fieldSerdes = { new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() };
+ ISerializerDeserializer[] fieldSerdes =
+ { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
// Range search in ["cbf", "ddd", cc7", "eee"]
ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
index 4a9e0ed..bf3c8e5 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexTestUtils.java
@@ -58,8 +58,8 @@
private static void compareActualAndExpected(ITupleReference actual, CheckTuple expected,
ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
for (int i = 0; i < fieldSerdes.length; i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(actual.getFieldData(i), actual.getFieldStart(i),
- actual.getFieldLength(i));
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(actual.getFieldData(i), actual.getFieldStart(i), actual.getFieldLength(i));
DataInput dataIn = new DataInputStream(inStream);
Object actualObj = fieldSerdes[i].deserialize(dataIn);
if (!actualObj.equals(expected.getField(i))) {
@@ -99,20 +99,20 @@
MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), lowKey);
MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), highKey);
IIndexCursor searchCursor = ctx.getIndexAccessor().createSearchCursor(false);
- RangePredicate rangePred = new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp,
- highKeyCmp);
+ RangePredicate rangePred =
+ new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp, highKeyCmp);
ctx.getIndexAccessor().search(searchCursor, rangePred);
// Get the subset of elements from the expected set within given key
// range.
CheckTuple lowKeyCheck = createCheckTupleFromTuple(lowKey, ctx.getFieldSerdes(), lowKeyCmp.getKeyFieldCount());
- CheckTuple highKeyCheck = createCheckTupleFromTuple(highKey, ctx.getFieldSerdes(),
- highKeyCmp.getKeyFieldCount());
+ CheckTuple highKeyCheck =
+ createCheckTupleFromTuple(highKey, ctx.getFieldSerdes(), highKeyCmp.getKeyFieldCount());
SortedSet<CheckTuple> expectedSubset = null;
if (lowKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()
|| highKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()) {
// Searching on a key prefix (low key or high key or both).
- expectedSubset = getPrefixExpectedSubset((TreeSet<CheckTuple>) ctx.getCheckTuples(), lowKeyCheck,
- highKeyCheck);
+ expectedSubset =
+ getPrefixExpectedSubset((TreeSet<CheckTuple>) ctx.getCheckTuples(), lowKeyCheck, highKeyCheck);
} else {
// Searching on all key fields.
expectedSubset = ((TreeSet<CheckTuple>) ctx.getCheckTuples()).subSet(lowKeyCheck, lowKeyInclusive,
@@ -246,8 +246,8 @@
throws HyracksDataException {
int fieldCount = ctx.getFieldCount();
int numTuples = checkTuples.size();
- ArrayTupleBuilder tupleBuilder = filtered ? new ArrayTupleBuilder(fieldCount + 1)
- : new ArrayTupleBuilder(fieldCount);
+ ArrayTupleBuilder tupleBuilder =
+ filtered ? new ArrayTupleBuilder(fieldCount + 1) : new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
int c = 1;
@@ -298,7 +298,7 @@
// because we ignore duplicate keys.
ctx.insertCheckTuple(createStringCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
if (filtered) {
- addFilterField(ctx,minMax);
+ addFilterField(ctx, minMax);
}
} catch (HyracksDataException e) {
// Ignore duplicate key insertions.
@@ -476,8 +476,8 @@
}
@Override
- public void checkExpectedResults(IIndexCursor cursor, Collection checkTuples,
- ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
+ public void checkExpectedResults(IIndexCursor cursor, Collection checkTuples, ISerializerDeserializer[] fieldSerdes,
+ int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
int actualCount = 0;
try {
while (cursor.hasNext()) {
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexUpsertTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexUpsertTest.java
index 2118f8c..3417066 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexUpsertTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/btree/OrderedIndexUpsertTest.java
@@ -45,8 +45,8 @@
@Override
protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
- ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
- throws Exception {
+ ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey,
+ ITupleReference prefixHighKey) throws Exception {
OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType, false);
ctx.getIndex().create();
ctx.getIndex().activate();
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/CheckTuple.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/CheckTuple.java
index 8a6996b..31c55b3 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/CheckTuple.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/CheckTuple.java
@@ -19,7 +19,7 @@
package org.apache.hyracks.storage.am.common;
-@SuppressWarnings({"rawtypes", "unchecked"})
+@SuppressWarnings({ "rawtypes", "unchecked" })
public class CheckTuple<T extends Comparable<T>> implements Comparable<T> {
protected final int numKeys;
protected final Comparable[] fields;
@@ -101,7 +101,7 @@
StringBuilder strBuilder = new StringBuilder();
for (int i = 0; i < fields.length; i++) {
strBuilder.append(fields[i].toString());
- if (i != fields.length-1) {
+ if (i != fields.length - 1) {
strBuilder.append(" ");
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexTestContext.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexTestContext.java
index 6ac02e1..2c08ba0 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexTestContext.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/IndexTestContext.java
@@ -44,8 +44,8 @@
IndexAccessParameters actx =
new IndexAccessParameters(TestOperationCallback.INSTANCE, TestOperationCallback.INSTANCE);
this.indexAccessor = index.createAccessor(actx);
- this.tupleBuilder = filtered ? new ArrayTupleBuilder(fieldSerdes.length + 1)
- : new ArrayTupleBuilder(fieldSerdes.length);
+ this.tupleBuilder =
+ filtered ? new ArrayTupleBuilder(fieldSerdes.length + 1) : new ArrayTupleBuilder(fieldSerdes.length);
}
@Override
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationCallback.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationCallback.java
index 60d88e5..d796ece 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationCallback.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationCallback.java
@@ -25,7 +25,7 @@
import org.apache.hyracks.storage.common.IModificationOperationCallback;
import org.apache.hyracks.storage.common.ISearchOperationCallback;
-public enum TestOperationCallback implements ISearchOperationCallback,IModificationOperationCallback {
+public enum TestOperationCallback implements ISearchOperationCallback, IModificationOperationCallback {
INSTANCE;
private static final int RANDOM_SEED = 50;
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationSelector.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationSelector.java
index f804f89..e4c4332 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationSelector.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TestOperationSelector.java
@@ -21,7 +21,6 @@
import org.apache.hyracks.storage.am.common.datagen.ProbabilityHelper;
-
public class TestOperationSelector {
public static enum TestOperation {
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
index 43258dd..f0b01a2 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/TreeIndexTestUtils.java
@@ -108,8 +108,8 @@
CheckTuple checkTuple = createCheckTuple(fieldSerdes.length, numKeys);
int fieldCount = Math.min(fieldSerdes.length, tuple.getFieldCount());
for (int i = 0; i < fieldCount; i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
+ ByteArrayInputStream inStream =
+ new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
DataInput dataIn = new DataInputStream(inStream);
Comparable fieldObj = (Comparable) fieldSerdes[i].deserialize(dataIn);
checkTuple.appendField(fieldObj);
@@ -142,8 +142,8 @@
while (diskOrderCursor.hasNext()) {
diskOrderCursor.next();
ITupleReference tuple = diskOrderCursor.getTuple();
- CheckTuple checkTuple = createCheckTupleFromTuple(tuple, ctx.getFieldSerdes(),
- ctx.getKeyFieldCount());
+ CheckTuple checkTuple =
+ createCheckTupleFromTuple(tuple, ctx.getFieldSerdes(), ctx.getKeyFieldCount());
if (!checkDiskOrderScanResult(tuple, checkTuple, ctx)) {
fail("Disk-order scan returned unexpected answer: " + checkTuple.toString());
}
@@ -315,8 +315,8 @@
throws HyracksDataException {
int fieldCount = ctx.getFieldCount();
int numTuples = checkTuples.size();
- ArrayTupleBuilder tupleBuilder = filtered ? new ArrayTupleBuilder(fieldCount + 1)
- : new ArrayTupleBuilder(fieldCount);
+ ArrayTupleBuilder tupleBuilder =
+ filtered ? new ArrayTupleBuilder(fieldCount + 1) : new ArrayTupleBuilder(fieldCount);
ArrayTupleReference tuple = new ArrayTupleReference();
// Perform bulk load.
IIndexBulkLoader bulkLoader = ctx.getIndex().createBulkLoader(0.7f, false, numTuples, false);
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/IFieldValueGenerator.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/IFieldValueGenerator.java
index dbd4bfc..986eccc 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/IFieldValueGenerator.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/IFieldValueGenerator.java
@@ -21,5 +21,6 @@
public interface IFieldValueGenerator<T> {
public T next();
+
public void reset();
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/PersonNameFieldValueGenerator.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/PersonNameFieldValueGenerator.java
index 256eaf5..da7f52c 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/PersonNameFieldValueGenerator.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/PersonNameFieldValueGenerator.java
@@ -40,8 +40,7 @@
private List<String> firstNames = new ArrayList<>();
private List<String> lastNames = new ArrayList<>();
- public PersonNameFieldValueGenerator(Random rnd, double middleInitialProb)
- throws IOException {
+ public PersonNameFieldValueGenerator(Random rnd, double middleInitialProb) throws IOException {
this.rnd = rnd;
this.middleInitialProb = middleInitialProb;
initNames();
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleBatch.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleBatch.java
index c34c7bc..84cda9b 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleBatch.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleBatch.java
@@ -31,7 +31,8 @@
private final TupleGenerator[] tupleGens;
public final AtomicBoolean inUse = new AtomicBoolean(false);
- public TupleBatch(int size, IFieldValueGenerator[] fieldGens, ISerializerDeserializer[] fieldSerdes, int payloadSize) {
+ public TupleBatch(int size, IFieldValueGenerator[] fieldGens, ISerializerDeserializer[] fieldSerdes,
+ int payloadSize) {
this.size = size;
tupleGens = new TupleGenerator[size];
for (int i = 0; i < size; i++) {
@@ -40,7 +41,7 @@
}
public void generate() throws IOException {
- for(TupleGenerator tupleGen : tupleGens) {
+ for (TupleGenerator tupleGen : tupleGens) {
tupleGen.next();
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleGenerator.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleGenerator.java
index 4f26065..eb76e67 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleGenerator.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/common/datagen/TupleGenerator.java
@@ -27,7 +27,7 @@
import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import org.apache.hyracks.dataflow.common.data.accessors.ITupleReference;
-@SuppressWarnings({"rawtypes", "unchecked" })
+@SuppressWarnings({ "rawtypes", "unchecked" })
public class TupleGenerator {
protected final ISerializerDeserializer[] fieldSerdes;
protected final IFieldValueGenerator[] fieldGens;
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java
index ce486fd..2d3289c 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java
@@ -56,8 +56,8 @@
} else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
rTreeTestUtils.insertDoubleTuples(ctx, numTuplesToInsert, getRandom());
}
- int numTuplesPerDeleteRound = (int) Math
- .ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
+ int numTuplesPerDeleteRound =
+ (int) Math.ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
for (int j = 0; j < numDeleteRounds; j++) {
rTreeTestUtils.deleteTuples(ctx, numTuplesPerDeleteRound, getRandom());
rTreeTestUtils.checkScan(ctx);
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
index 1f71889..9064225 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
@@ -71,8 +71,8 @@
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
int numKeys = 4;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(numKeys, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, IntegerPointable.FACTORY);
// Range search, the rectangle bottom left coordinates are -1000, -1000
// and the top right coordinates are 1000, 1000
ITupleReference key = TupleUtils.createIntegerTuple(-1000, -1000, 1000, 1000);
@@ -92,8 +92,8 @@
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
int numKeys = 4;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
// Range search, the rectangle bottom left coordinates are -1000.0,
// -1000.0 and the top right coordinates are 1000.0, 1000.0
ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, 1000.0, 1000.0);
@@ -115,13 +115,13 @@
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
int numKeys = 8;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
// Range search, the rectangle bottom left coordinates are -1000.0,
// -1000.0, -1000.0, -1000.0 and the top right coordinates are 1000.0,
// 1000.0, 1000.0, 1000.0
- ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0,
- 1000.0);
+ ITupleReference key =
+ TupleUtils.createDoubleTuple(-1000.0, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0, 1000.0);
runTest(fieldSerdes, valueProviderFactories, numKeys, key, RTreePolicyType.RTREE);
}
@@ -143,8 +143,8 @@
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
int numKeys = 4;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(numKeys, IntegerPointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, IntegerPointable.FACTORY);
// Range search, the rectangle bottom left coordinates are -1000, -1000
// and the top right coordinates are 1000, 1000
ITupleReference key = TupleUtils.createIntegerTuple(-1000, -1000, 1000, 1000);
@@ -170,8 +170,8 @@
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
int numKeys = 4;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
// Range search, the rectangle bottom left coordinates are -1000.0,
// -1000.0 and the top right coordinates are 1000.0, 1000.0
ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, 1000.0, 1000.0);
@@ -199,13 +199,13 @@
DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
int numKeys = 8;
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
- .createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
+ IPrimitiveValueProviderFactory[] valueProviderFactories =
+ RTreeUtils.createPrimitiveValueProviderFactories(numKeys, DoublePointable.FACTORY);
// Range search, the rectangle bottom left coordinates are -1000.0,
// -1000.0, -1000.0, -1000.0 and the top right coordinates are 1000.0,
// 1000.0, 1000.0, 1000.0
- ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0,
- 1000.0);
+ ITupleReference key =
+ TupleUtils.createDoubleTuple(-1000.0, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0, 1000.0);
runTest(fieldSerdes, valueProviderFactories, numKeys, key, RTreePolicyType.RSTARTREE);
}
diff --git a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
index 300a1ff..f48da3a1 100644
--- a/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
+++ b/hyracks-fullstack/hyracks/hyracks-test-support/src/main/java/org/apache/hyracks/storage/am/rtree/RTreeTestUtils.java
@@ -173,8 +173,8 @@
}
@Override
- public void checkExpectedResults(IIndexCursor cursor, Collection checkTuples,
- ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
+ public void checkExpectedResults(IIndexCursor cursor, Collection checkTuples, ISerializerDeserializer[] fieldSerdes,
+ int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
int actualCount = 0;
try {
while (cursor.hasNext()) {
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreePageSizePerf.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreePageSizePerf.java
index 2d355da..95c308a 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreePageSizePerf.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/BTreePageSizePerf.java
@@ -35,7 +35,7 @@
public static void main(String[] args) throws Exception {
// Disable logging so we can better see the output times.
Enumeration<String> loggers = LogManager.getLogManager().getLoggerNames();
- while(loggers.hasMoreElements()) {
+ while (loggers.hasMoreElements()) {
String loggerName = loggers.nextElement();
Logger logger = LogManager.getLogManager().getLogger(loggerName);
logger.setLevel(Level.OFF);
@@ -45,10 +45,12 @@
int batchSize = 10000;
int numBatches = numTuples / batchSize;
- ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes, 30);
- IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, fieldSerdes.length);
+ IBinaryComparatorFactory[] cmpFactories =
+ SerdeUtils.serdesToComparatorFactories(fieldSerdes, fieldSerdes.length);
runExperiment(numBatches, batchSize, 1024, 100000, fieldSerdes, cmpFactories, typeTraits);
runExperiment(numBatches, batchSize, 2048, 100000, fieldSerdes, cmpFactories, typeTraits);
@@ -61,7 +63,9 @@
runExperiment(numBatches, batchSize, 262144, 391, fieldSerdes, cmpFactories, typeTraits);
}
- private static void runExperiment(int numBatches, int batchSize, int pageSize, int numPages, ISerializerDeserializer[] fieldSerdes, IBinaryComparatorFactory[] cmpFactories, ITypeTraits[] typeTraits) throws Exception {
+ private static void runExperiment(int numBatches, int batchSize, int pageSize, int numPages,
+ ISerializerDeserializer[] fieldSerdes, IBinaryComparatorFactory[] cmpFactories, ITypeTraits[] typeTraits)
+ throws Exception {
System.out.println("PAGE SIZE: " + pageSize);
System.out.println("NUM PAGES: " + numPages);
System.out.println("MEMORY: " + (pageSize * numPages));
@@ -72,7 +76,8 @@
runner.init();
int numThreads = 1;
for (int i = 0; i < repeats; i++) {
- DataGenThread dataGen = new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, 30, 50, 10, false);
+ DataGenThread dataGen =
+ new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, 30, 50, 10, false);
dataGen.start();
times[i] = runner.runExperiment(dataGen, numThreads);
System.out.println("TIME " + i + ": " + times[i] + "ms");
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/PerfExperiment.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/PerfExperiment.java
index 3f14f62..16d1208 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/PerfExperiment.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/perf/PerfExperiment.java
@@ -41,7 +41,7 @@
logger.setLevel(Level.OFF);
}
boolean sorted = Boolean.parseBoolean(args[0]);
- int numThreads = Integer.parseInt(args[1]);
+ int numThreads = Integer.parseInt(args[1]);
//int numTuples = 100000; // 100K
//int numTuples = 1000000; // 1M
@@ -58,11 +58,12 @@
int numBatches = numTuples / batchSize;
int payLoadSize = 240;
- ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] fieldSerdes =
+ new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes, payLoadSize);
- IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes,
- fieldSerdes.length);
+ IBinaryComparatorFactory[] cmpFactories =
+ SerdeUtils.serdesToComparatorFactories(fieldSerdes, fieldSerdes.length);
int[] bloomFilterKeyFields = new int[cmpFactories.length];
for (int i = 0; i < bloomFilterKeyFields.length; i++) {
bloomFilterKeyFields[i] = i;
@@ -73,8 +74,8 @@
int repeats = 1;
long[] times = new long[repeats];
-// int numThreads = 4;
-// boolean sorted = true;
+ // int numThreads = 4;
+ // boolean sorted = true;
for (int i = 0; i < repeats; i++) {
//ConcurrentSkipListRunner runner = new ConcurrentSkipListRunner(numBatches, batchSize, tupleSize, typeTraits, cmp);
//InMemoryBTreeRunner runner = new InMemoryBTreeRunner(numBatches, 8192, 100000, typeTraits, cmpFactories);
@@ -90,7 +91,8 @@
int onDiskNumPages = 16384; // 2GB
LSMTreeRunner runner = new LSMTreeRunner(numBatches, inMemPageSize, inMemNumPages, onDiskPageSize,
onDiskNumPages, typeTraits, cmpFactories, bloomFilterKeyFields, bloomFilterFalsePositiveRate);
- DataGenThread dataGen = new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, payLoadSize, 50, 10, sorted);
+ DataGenThread dataGen =
+ new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, payLoadSize, 50, 10, sorted);
dataGen.start();
runner.reset();
times[i] = runner.runExperiment(dataGen, numThreads);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTuplesTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTuplesTest.java
index 4c51520..c0d9bb8 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTuplesTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/org/apache/hyracks/storage/am/lsm/btree/tuples/LSMBTreeTuplesTest.java
@@ -70,10 +70,11 @@
// Create and write tuple to bytes using an LSMBTreeTupleWriter.
LSMBTreeTupleWriter maxMatterTupleWriter =
new LSMBTreeTupleWriter(maxTypeTraits, numKeyFields, false, false);
- ITupleReference maxTuple = TupleUtils.createTuple(maxFieldSerdes, (Object[])maxFields);
+ ITupleReference maxTuple = TupleUtils.createTuple(maxFieldSerdes, (Object[]) maxFields);
ByteBuffer maxMatterBuf = writeTuple(maxTuple, maxMatterTupleWriter);
// Tuple reference should work for both matter and antimatter tuples (doesn't matter which factory creates it).
- LSMBTreeTupleReference maxLsmBTreeTuple = (LSMBTreeTupleReference) maxMatterTupleWriter.createTupleReference();
+ LSMBTreeTupleReference maxLsmBTreeTuple =
+ (LSMBTreeTupleReference) maxMatterTupleWriter.createTupleReference();
ISerializerDeserializer[] fieldSerdes = Arrays.copyOfRange(maxFieldSerdes, 0, numFields);
ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
@@ -84,7 +85,7 @@
fields[j] = fieldGens[j].next();
}
// Create and write tuple to bytes using an LSMBTreeTupleWriter.
- ITupleReference tuple = TupleUtils.createTuple(fieldSerdes, (Object[])fields);
+ ITupleReference tuple = TupleUtils.createTuple(fieldSerdes, (Object[]) fields);
LSMBTreeTupleWriter matterTupleWriter = new LSMBTreeTupleWriter(typeTraits, numKeyFields, false, false);
LSMBTreeTupleWriter antimatterTupleWriter =
new LSMBTreeTupleWriter(typeTraits, numKeyFields, true, false);
@@ -98,7 +99,8 @@
}
// Tuple reference should work for both matter and antimatter tuples (doesn't matter which factory creates it).
- LSMBTreeTupleReference lsmBTreeTuple = (LSMBTreeTupleReference) matterTupleWriter.createTupleReference();
+ LSMBTreeTupleReference lsmBTreeTuple =
+ (LSMBTreeTupleReference) matterTupleWriter.createTupleReference();
// Use LSMBTree tuple reference to interpret the written tuples.
// Repeat the block inside to test that repeated resetting to matter/antimatter tuples works.
@@ -145,7 +147,8 @@
}
}
- private void checkTuple(LSMBTreeTupleReference tuple, int expectedFieldCount, boolean expectedAntimatter, ISerializerDeserializer[] fieldSerdes, Object[] expectedFields) throws HyracksDataException {
+ private void checkTuple(LSMBTreeTupleReference tuple, int expectedFieldCount, boolean expectedAntimatter,
+ ISerializerDeserializer[] fieldSerdes, Object[] expectedFields) throws HyracksDataException {
assertEquals(expectedFieldCount, tuple.getFieldCount());
assertEquals(expectedAntimatter, tuple.isAntimatter());
Object[] deserMatterTuple = TupleUtils.deserializeTuple(tuple, fieldSerdes);
@@ -156,22 +159,20 @@
@Test
public void testLSMBTreeTuple() throws HyracksDataException {
- ISerializerDeserializer[] intFields = new IntegerSerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] intFields =
+ new IntegerSerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
testLSMBTreeTuple(intFields);
- ISerializerDeserializer[] stringFields = new ISerializerDeserializer[] {
+ ISerializerDeserializer[] stringFields = new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- new UTF8StringSerializerDeserializer() };
+ new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
testLSMBTreeTuple(stringFields);
- ISerializerDeserializer[] mixedFields = new ISerializerDeserializer[] {
- new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
- new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer(),
- IntegerSerializerDeserializer.INSTANCE };
+ ISerializerDeserializer[] mixedFields = new ISerializerDeserializer[] { new UTF8StringSerializerDeserializer(),
+ IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer(),
+ new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE };
testLSMBTreeTuple(mixedFields);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/LSMComponentFilterReferenceTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/LSMComponentFilterReferenceTest.java
index af37d80..6ec661e 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/LSMComponentFilterReferenceTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/LSMComponentFilterReferenceTest.java
@@ -49,7 +49,7 @@
byte[] serFilter = filter.getByteArray();
LSMComponentFilterReference deserFilter = new LSMComponentFilterReference(
new TypeAwareTupleWriter((new ITypeTraits[] { IntegerPointable.TYPE_TRAITS })));
- deserFilter.set(serFilter,0,20);
+ deserFilter.set(serFilter, 0, 20);
Assert.assertTrue(deserFilter.isMaxTupleSet() && deserFilter.isMinTupleSet());
Assert.assertEquals(
TupleUtils.deserializeTuple(deserFilter.getMinTuple(),
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/VirtualFreePageManagerTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/VirtualFreePageManagerTest.java
index f4ec55d..fd85824 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/VirtualFreePageManagerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/org/apache/hyracks/storage/am/lsm/common/test/VirtualFreePageManagerTest.java
@@ -35,7 +35,8 @@
private final int NUM_PAGES = 100;
- private void testInMemoryFreePageManager(VirtualFreePageManager virtualFreePageManager) throws HyracksDataException {
+ private void testInMemoryFreePageManager(VirtualFreePageManager virtualFreePageManager)
+ throws HyracksDataException {
// The first two pages are reserved for the BTree's metadata page and
// root page.
int capacity = NUM_PAGES - 2;
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTestDriver.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTestDriver.java
index 9ae9940..bfe70e6 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTestDriver.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/multithread/LSMInvertedIndexMultiThreadTestDriver.java
@@ -40,6 +40,7 @@
}
public DataGenThread createDatagenThread(int numThreads, int numBatches, int batchSize) {
- return new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, fieldGens, RANDOM_SEED, 2 * numThreads);
+ return new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, fieldGens, RANDOM_SEED,
+ 2 * numThreads);
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramTokenizerTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramTokenizerTest.java
index 6e764c3..b536ef2 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramTokenizerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/NGramTokenizerTest.java
@@ -75,8 +75,8 @@
void runTestNGramTokenizerWithCountedHashedUTF8Tokens(boolean prePost) throws IOException {
HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
- NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, false,
- false, tokenFactory);
+ NGramUTF8StringBinaryTokenizer tokenizer =
+ new NGramUTF8StringBinaryTokenizer(gramLength, prePost, false, false, tokenFactory);
tokenizer.reset(inputBuffer, 0, inputBuffer.length);
ArrayList<String> expectedGrams = new ArrayList<String>();
@@ -124,8 +124,8 @@
void runTestNGramTokenizerWithHashedUTF8Tokens(boolean prePost) throws IOException {
HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
- NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false,
- tokenFactory);
+ NGramUTF8StringBinaryTokenizer tokenizer =
+ new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false, tokenFactory);
tokenizer.reset(inputBuffer, 0, inputBuffer.length);
ArrayList<String> expectedGrams = new ArrayList<String>();
@@ -164,8 +164,8 @@
void runTestNGramTokenizerWithUTF8Tokens(boolean prePost) throws IOException {
UTF8NGramTokenFactory tokenFactory = new UTF8NGramTokenFactory();
- NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false,
- tokenFactory);
+ NGramUTF8StringBinaryTokenizer tokenizer =
+ new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false, tokenFactory);
tokenizer.reset(inputBuffer, 0, inputBuffer.length);
ArrayList<String> expectedGrams = new ArrayList<String>();
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/WordTokenizerTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/WordTokenizerTest.java
index 78ba6a3..11a0b02 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/WordTokenizerTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-am-lsm-invertedindex-test/src/test/java/org/apache/hyracks/storage/am/lsm/invertedindex/tokenizers/WordTokenizerTest.java
@@ -108,8 +108,8 @@
public void testWordTokenizerWithCountedHashedUTF8Tokens() throws IOException {
HashedUTF8WordTokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
- DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(false, false,
- tokenFactory);
+ DelimitedUTF8StringBinaryTokenizer tokenizer =
+ new DelimitedUTF8StringBinaryTokenizer(false, false, tokenFactory);
tokenizer.reset(inputBuffer, 0, inputBuffer.length);
@@ -140,8 +140,8 @@
public void testWordTokenizerWithHashedUTF8Tokens() throws IOException {
HashedUTF8WordTokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
- DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false,
- tokenFactory);
+ DelimitedUTF8StringBinaryTokenizer tokenizer =
+ new DelimitedUTF8StringBinaryTokenizer(true, false, tokenFactory);
tokenizer.reset(inputBuffer, 0, inputBuffer.length);
@@ -172,8 +172,8 @@
public void testWordTokenizerWithUTF8Tokens() throws IOException {
UTF8WordTokenFactory tokenFactory = new UTF8WordTokenFactory();
- DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false,
- tokenFactory);
+ DelimitedUTF8StringBinaryTokenizer tokenizer =
+ new DelimitedUTF8StringBinaryTokenizer(true, false, tokenFactory);
tokenizer.reset(inputBuffer, 0, inputBuffer.length);
diff --git a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/IOManagerPathTest.java b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/IOManagerPathTest.java
index 3d6e3ef..e2a875b 100644
--- a/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/IOManagerPathTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/org/apache/hyracks/storage/common/IOManagerPathTest.java
@@ -38,8 +38,8 @@
public void testPrefixNames() throws HyracksDataException {
IODeviceHandle shorter = new IODeviceHandle(new File("/tmp/tst/1"), "storage");
IODeviceHandle longer = new IODeviceHandle(new File("/tmp/tst/11"), "storage");
- IOManager ioManager = new IOManager(Arrays.asList(new IODeviceHandle[] { shorter, longer }),
- new DefaultDeviceResolver());
+ IOManager ioManager =
+ new IOManager(Arrays.asList(new IODeviceHandle[] { shorter, longer }), new DefaultDeviceResolver());
FileReference f = ioManager.resolveAbsolutePath("/tmp/tst/11/storage/Foo_idx_foo/my_btree");
Assert.assertEquals("/tmp/tst/11/storage/Foo_idx_foo/my_btree", f.getAbsolutePath());
}
@@ -48,8 +48,8 @@
public void testDuplicates() throws HyracksDataException {
IODeviceHandle first = new IODeviceHandle(new File("/tmp/tst/1"), "storage");
IODeviceHandle second = new IODeviceHandle(new File("/tmp/tst/1"), "storage");
- IOManager ioManager = new IOManager(Arrays.asList(new IODeviceHandle[] { first, second }),
- new DefaultDeviceResolver());
+ IOManager ioManager =
+ new IOManager(Arrays.asList(new IODeviceHandle[] { first, second }), new DefaultDeviceResolver());
}
@After
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/StorageUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/StorageUtil.java
index dbfe6f9..e4969f0 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/StorageUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/StorageUtil.java
@@ -166,7 +166,7 @@
return bytes + " B";
}
final int baseValue = (63 - Long.numberOfLeadingZeros(bytes)) / 10;
- final char bytePrefix = " kMGTPE" .charAt(baseValue);
+ final char bytePrefix = " kMGTPE".charAt(baseValue);
final long divisor = 1L << (baseValue * 10);
if (bytes % divisor == 0) {
return String.format("%d %sB", bytes / divisor, bytePrefix);
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Parser.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Parser.java
index 257daee..9f527e9 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Parser.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Parser.java
@@ -152,7 +152,7 @@
break;
}
- j++; // text.charAt(j) is now at some base64 char, so +1 to make it the size
+ j++; // text.charAt(j) is now at some base64 char, so +1 to make it the size
int padSize = length - j;
if (padSize > 2) // something is wrong with base64. be safe and go with the upper bound
{
@@ -180,7 +180,7 @@
break;
}
- j++; // text.charAt(j) is now at some base64 char, so +1 to make it the size
+ j++; // text.charAt(j) is now at some base64 char, so +1 to make it the size
int padSize = length - j;
if (padSize > 2) // something is wrong with base64. be safe and go with the upper bound
{
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Printer.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Printer.java
index 0e1c078..65557b1 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Printer.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/Base64Printer.java
@@ -34,12 +34,8 @@
int i;
for (i = offset; remaining >= 3; remaining -= 3, i += 3) {
appendable.append(encode(input[i] >> 2));
- appendable.append(encode(
- ((input[i] & 0x3) << 4)
- | ((input[i + 1] >> 4) & 0xF)));
- appendable.append(encode(
- ((input[i + 1] & 0xF) << 2)
- | ((input[i + 2] >> 6) & 0x3)));
+ appendable.append(encode(((input[i] & 0x3) << 4) | ((input[i + 1] >> 4) & 0xF)));
+ appendable.append(encode(((input[i + 1] & 0xF) << 2) | ((input[i + 2] >> 6) & 0x3)));
appendable.append(encode(input[i + 2] & 0x3F));
}
// encode when exactly 1 element (left) to encode
@@ -52,8 +48,7 @@
// encode when exactly 2 elements (left) to encode
if (remaining == 2) {
appendable.append(encode(input[i] >> 2));
- appendable.append(encode(((input[i] & 0x3) << 4)
- | ((input[i + 1] >> 4) & 0xF)));
+ appendable.append(encode(((input[i] & 0x3) << 4) | ((input[i + 1] >> 4) & 0xF)));
appendable.append(encode((input[i + 1] & 0xF) << 2));
appendable.append('=');
}
@@ -73,12 +68,8 @@
int i;
for (i = offset; remaining >= 3; remaining -= 3, i += 3) {
buf[ptr++] = encode(input[i] >> 2);
- buf[ptr++] = encode(
- ((input[i] & 0x3) << 4)
- | ((input[i + 1] >> 4) & 0xF));
- buf[ptr++] = encode(
- ((input[i + 1] & 0xF) << 2)
- | ((input[i + 2] >> 6) & 0x3));
+ buf[ptr++] = encode(((input[i] & 0x3) << 4) | ((input[i + 1] >> 4) & 0xF));
+ buf[ptr++] = encode(((input[i + 1] & 0xF) << 2) | ((input[i + 2] >> 6) & 0x3));
buf[ptr++] = encode(input[i + 2] & 0x3F);
}
// encode when exactly 1 element (left) to encode
@@ -91,8 +82,7 @@
// encode when exactly 2 elements (left) to encode
if (remaining == 2) {
buf[ptr++] = encode(input[i] >> 2);
- buf[ptr++] = encode(((input[i] & 0x3) << 4)
- | ((input[i + 1] >> 4) & 0xF));
+ buf[ptr++] = encode(((input[i] & 0x3) << 4) | ((input[i + 1] >> 4) & 0xF));
buf[ptr++] = encode((input[i + 1] & 0xF) << 2);
buf[ptr++] = '=';
}
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/HexParser.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/HexParser.java
index ba7276b..46bc0a4 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/HexParser.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/bytes/HexParser.java
@@ -21,9 +21,7 @@
public class HexParser {
public static boolean isValidHexChar(char c) {
- if (c >= '0' && c <= '9'
- || c >= 'a' && c <= 'f'
- || c >= 'A' && c <= 'F') {
+ if (c >= '0' && c <= '9' || c >= 'a' && c <= 'f' || c >= 'A' && c <= 'F') {
return true;
}
return false;
@@ -79,19 +77,17 @@
}
}
- public static void generateByteArrayFromHexString(char[] input, int start, int length, byte[] output,
- int offset) {
+ public static void generateByteArrayFromHexString(char[] input, int start, int length, byte[] output, int offset) {
for (int i = 0; i < length; i += 2) {
- output[offset + i / 2] = (byte) ((getValueFromValidHexChar(input[start + i]) << 4) +
- getValueFromValidHexChar(input[start + i + 1]));
+ output[offset + i / 2] = (byte) ((getValueFromValidHexChar(input[start + i]) << 4)
+ + getValueFromValidHexChar(input[start + i + 1]));
}
}
- public static void generateByteArrayFromHexString(byte[] input, int start, int length, byte[] output,
- int offset) {
+ public static void generateByteArrayFromHexString(byte[] input, int start, int length, byte[] output, int offset) {
for (int i = 0; i < length; i += 2) {
- output[offset + i / 2] = (byte) ((getValueFromValidHexChar((char) input[start + i]) << 4) +
- getValueFromValidHexChar((char) input[start + i + 1]));
+ output[offset + i / 2] = (byte) ((getValueFromValidHexChar((char) input[start + i]) << 4)
+ + getValueFromValidHexChar((char) input[start + i + 1]));
}
}
}
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java
index cd654d7..5d69448 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/string/UTF8StringUtil.java
@@ -541,8 +541,8 @@
if (((char2 & 0xC0) != 0x80) || ((char3 & 0xC0) != 0x80)) {
throw new UTFDataFormatException("malformed input around byte " + (count - 1));
}
- chararr[chararr_count++] = (char) (((c & 0x0F) << 12) | ((char2 & 0x3F) << 6)
- | ((char3 & 0x3F) << 0));
+ chararr[chararr_count++] =
+ (char) (((c & 0x0F) << 12) | ((char2 & 0x3F) << 6) | ((char3 & 0x3F) << 0));
break;
default:
/* 10xx xxxx, 1111 xxxx */
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/trace/Tracer.java b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/trace/Tracer.java
index 5313514..ea3793d 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/trace/Tracer.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/main/java/org/apache/hyracks/util/trace/Tracer.java
@@ -38,8 +38,8 @@
protected static final Level TRACE_LOG_LEVEL = Level.INFO;
protected static final String CAT = "Tracer";
- protected static final ThreadLocal<DateFormat> DATE_FORMAT = ThreadLocal
- .withInitial(() -> new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX"));
+ protected static final ThreadLocal<DateFormat> DATE_FORMAT =
+ ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX"));
protected final Logger traceLog;
protected long categories;
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/MathTest.java b/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/MathTest.java
index c3443c8..a8762cd 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/MathTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/MathTest.java
@@ -32,9 +32,9 @@
Random random = new Random(System.currentTimeMillis());
for (int i = 0; i < 31; i++) {
assertTrue(MathUtil.log2Floor((int) Math.pow(2, i)) == i);
- for(int x = 0; x < 10; x++){
+ for (int x = 0; x < 10; x++) {
float extra = random.nextFloat();
- while (extra >= 1.0){
+ while (extra >= 1.0) {
extra = random.nextFloat();
}
assertTrue(MathUtil.log2Floor((int) Math.pow(2, i + extra)) == i);
diff --git a/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/string/UTF8StringUtilTest.java b/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/string/UTF8StringUtilTest.java
index f200384..5a614f0 100644
--- a/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/string/UTF8StringUtilTest.java
+++ b/hyracks-fullstack/hyracks/hyracks-util/src/test/java/org/apache/hyracks/util/string/UTF8StringUtilTest.java
@@ -92,7 +92,11 @@
return r2 == 0;
}
- enum OPTION {STANDARD, RAW_BYTE, LOWERCASE}
+ enum OPTION {
+ STANDARD,
+ RAW_BYTE,
+ LOWERCASE
+ }
public void testCompare(String str1, String str2, OPTION option) throws IOException {
byte[] buffer1 = writeStringToBytes(str1);
diff --git a/hyracks-fullstack/pom.xml b/hyracks-fullstack/pom.xml
index e03fb8a..a82355d 100644
--- a/hyracks-fullstack/pom.xml
+++ b/hyracks-fullstack/pom.xml
@@ -377,6 +377,21 @@
</execution>
</executions>
</plugin>
+ <plugin>
+ <groupId>net.revelc.code.formatter</groupId>
+ <artifactId>formatter-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <goals>
+ <goal>${source-format.goal}</goal>
+ </goals>
+ </execution>
+ </executions>
+ <configuration>
+ <configFile>${root.dir}/AsterixCodeFormatProfile.xml</configFile>
+ <skipFormatting>${source-format.skip}</skipFormatting>
+ </configuration>
+ </plugin>
</plugins>
<pluginManagement>
<plugins>
@@ -523,31 +538,6 @@
</properties>
</profile>
<profile>
- <id>source-format</id>
- <activation>
- <activeByDefault>false</activeByDefault>
- </activation>
- <build>
- <plugins>
- <plugin>
- <groupId>net.revelc.code.formatter</groupId>
- <artifactId>formatter-maven-plugin</artifactId>
- <executions>
- <execution>
- <goals>
- <goal>${source-format.goal}</goal>
- </goals>
- </execution>
- </executions>
- <configuration>
- <configFile>${root.dir}/AsterixCodeFormatProfile.xml</configFile>
- <skipFormatting>${source-format.skip}</skipFormatting>
- </configuration>
- </plugin>
- </plugins>
- </build>
- </profile>
- <profile>
<id>skip-assembly</id>
<activation>
<file>
diff --git a/hyracks-fullstack/src/main/assembly/source.xml b/hyracks-fullstack/src/main/assembly/source.xml
index f5dd61f..4d668ce 100644
--- a/hyracks-fullstack/src/main/assembly/source.xml
+++ b/hyracks-fullstack/src/main/assembly/source.xml
@@ -24,24 +24,9 @@
<directory>${project.basedir}</directory>
<outputDirectory>/</outputDirectory>
<useDefaultExcludes>true</useDefaultExcludes>
- <includes>
- <include>**/LICENSE</include>
- <include>**/NOTICE</include>
- <include>**/README</include>
- <include>**/src/**</include>
- <include>**/results/**</include>
- <include>**/pom.xml</include>
- <include>**/build*.xml</include>
- <include>**/findbugs*.xml</include>
- <include>**/*.tbl</include>
- <include>**/*.tsv</include>
- <include>**/*.js</include>
- <include>**/*.txt</include>
- <include>**/*.piglet</include>
- <include>**/*.ddl</include>
- </includes>
<excludes>
- <exclude>**/${project.build.directory}/**</exclude>
+ <exclude>${project.build.directory}/**</exclude>
+ <exclude>release.properties</exclude>
</excludes>
</fileSet>
</fileSets>